[ 538.976812] env[70020]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=70020) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 538.977296] env[70020]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=70020) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 538.977296] env[70020]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=70020) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 538.977724] env[70020]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 539.078948] env[70020]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=70020) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 539.089202] env[70020]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=70020) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 539.134469] env[70020]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 539.694048] env[70020]: INFO nova.virt.driver [None req-50014370-eab2-402f-b9a4-9fd3b851894b None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 539.763826] env[70020]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 539.763997] env[70020]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 539.764102] env[70020]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=70020) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 542.681340] env[70020]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-63387abb-c309-46cf-a2e7-dbe8e89a4fa3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.697807] env[70020]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=70020) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 542.698025] env[70020]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-fedb6e1c-9e73-4808-ab3c-1b299aa355c1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.731601] env[70020]: INFO oslo_vmware.api [-] Successfully established new session; session ID is f4061.
[ 542.731792] env[70020]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.968s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 542.732305] env[70020]: INFO nova.virt.vmwareapi.driver [None req-50014370-eab2-402f-b9a4-9fd3b851894b None None] VMware vCenter version: 7.0.3
[ 542.735661] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bd6fa3-9ee8-4ed5-afe7-17f2a90e05af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.752802] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e996990-f181-400d-8b32-bc2452305b52 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.758730] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff368a49-d361-4d27-ae4f-d9cedd856703 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.765355] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb6c1ff-9e45-4820-83ef-2a4f6ede39e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.778282] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113fdc04-1ff3-4bba-bf46-d5942cdb378f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.784193] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e6525b-2e6b-4b12-b4d4-3310cd13e7f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.814467] env[70020]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-9b439195-1022-471d-a6d7-57712402c879 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.819676] env[70020]: DEBUG nova.virt.vmwareapi.driver [None req-50014370-eab2-402f-b9a4-9fd3b851894b None None] Extension org.openstack.compute already exists. {{(pid=70020) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 542.822347] env[70020]: INFO nova.compute.provider_config [None req-50014370-eab2-402f-b9a4-9fd3b851894b None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 543.326067] env[70020]: DEBUG nova.context [None req-50014370-eab2-402f-b9a4-9fd3b851894b None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),eeee6c3d-1c23-4111-9f69-08669139bb51(cell1) {{(pid=70020) load_cells /opt/stack/nova/nova/context.py:464}}
[ 543.328126] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 543.328375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 543.329095] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 543.329525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Acquiring lock "eeee6c3d-1c23-4111-9f69-08669139bb51" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 543.329716] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Lock "eeee6c3d-1c23-4111-9f69-08669139bb51" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 543.330784] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Lock "eeee6c3d-1c23-4111-9f69-08669139bb51" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 543.350524] env[70020]: INFO dbcounter [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Registered counter for database nova_cell0
[ 543.358855] env[70020]: INFO dbcounter [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Registered counter for database nova_cell1
[ 543.809411] env[70020]: DEBUG oslo_db.sqlalchemy.engines [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=70020) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 543.809981] env[70020]: DEBUG oslo_db.sqlalchemy.engines [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=70020) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 543.817590] env[70020]: ERROR nova.db.main.api [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 543.817590] env[70020]: result = function(*args, **kwargs)
[ 543.817590] env[70020]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 543.817590] env[70020]: return func(*args, **kwargs)
[ 543.817590] env[70020]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 543.817590] env[70020]: result = fn(*args, **kwargs)
[ 543.817590] env[70020]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 543.817590] env[70020]: return f(*args, **kwargs)
[ 543.817590] env[70020]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 543.817590] env[70020]: return db.service_get_minimum_version(context, binaries)
[ 543.817590] env[70020]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 543.817590] env[70020]: _check_db_access()
[ 543.817590] env[70020]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 543.817590] env[70020]: stacktrace = ''.join(traceback.format_stack())
[ 543.817590] env[70020]:
[ 543.818658] env[70020]: ERROR nova.db.main.api [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 543.818658] env[70020]: result = function(*args, **kwargs)
[ 543.818658] env[70020]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 543.818658] env[70020]: return func(*args, **kwargs)
[ 543.818658] env[70020]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 543.818658] env[70020]: result = fn(*args, **kwargs)
[ 543.818658] env[70020]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 543.818658] env[70020]: return f(*args, **kwargs)
[ 543.818658] env[70020]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 543.818658] env[70020]: return db.service_get_minimum_version(context, binaries)
[ 543.818658] env[70020]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 543.818658] env[70020]: _check_db_access()
[ 543.818658] env[70020]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 543.818658] env[70020]: stacktrace = ''.join(traceback.format_stack())
[ 543.818658] env[70020]:
[ 543.819273] env[70020]: WARNING nova.objects.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 543.819460] env[70020]: WARNING nova.objects.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Failed to get minimum service version for cell eeee6c3d-1c23-4111-9f69-08669139bb51
[ 543.820150] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Acquiring lock "singleton_lock" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 543.820408] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Acquired lock "singleton_lock" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 543.820797] env[70020]: DEBUG oslo_concurrency.lockutils [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Releasing lock "singleton_lock" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 543.821310] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Full set of CONF: {{(pid=70020) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}}
[ 543.821548] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ******************************************************************************** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 543.821770] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] Configuration options gathered from: {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 543.821996] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 543.822335] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 543.822557] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ================================================================================ {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 543.822898] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] allow_resize_to_same_host = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.823201] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] arq_binding_timeout = 300 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.823430] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] backdoor_port = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.823657] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] backdoor_socket = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.823939] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] block_device_allocate_retries = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.824231] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] block_device_allocate_retries_interval = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.824523] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cert = self.pem {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.824817] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.825122] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute_monitors = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.825409] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] config_dir = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.826053] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] config_drive_format = iso9660 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.826302] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.826594] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] config_source = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.826881] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] console_host = devstack {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.827187] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] control_exchange = nova {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.827473] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cpu_allocation_ratio = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.827763] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] daemon = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.828066] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] debug = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.828345] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] default_access_ip_network_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.828635] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] default_availability_zone = nova {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.828913] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] default_ephemeral_format = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.829220] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] default_green_pool_size = 1000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.829637] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.829925] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] default_schedule_zone = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.830220] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] disk_allocation_ratio = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.830506] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] enable_new_services = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.830818] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] enabled_apis = ['osapi_compute'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.831111] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] enabled_ssl_apis = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.831396] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] flat_injected = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.831673] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] force_config_drive = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.831944] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] force_raw_images = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.832242] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] graceful_shutdown_timeout = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.832521] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] heal_instance_info_cache_interval = -1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.832830] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] host = cpu-1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.833028] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] initial_cpu_allocation_ratio = 4.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.833203] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] initial_disk_allocation_ratio = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.833366] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] initial_ram_allocation_ratio = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.833581] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.833747] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_build_timeout = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.833903] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_delete_interval = 300 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.834083] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_format = [instance: %(uuid)s] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.834244] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_name_template = instance-%08x {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.834402] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_usage_audit = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.834573] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_usage_audit_period = month {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.834742] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.834907] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] instances_path = /opt/stack/data/nova/instances {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.835082] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] internal_service_availability_zone = internal {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.835240] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] key = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.835397] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] live_migration_retry_count = 30 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.835564] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_color = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.835729] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_config_append = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.835894] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836066] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_dir = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836225] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_file = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836351] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_options = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836508] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_rotate_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836676] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_rotate_interval_type = days {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836838] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] log_rotation_type = none {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.836966] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.837106] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.837272] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.837433] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.837560] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.837719] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] long_rpc_timeout = 1800 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.837875] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] max_concurrent_builds = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838038] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] max_concurrent_live_migrations = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838201] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] max_concurrent_snapshots = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838358] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] max_local_block_devices = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838512] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] max_logfile_count = 30 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838667] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] max_logfile_size_mb = 200 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838817] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] maximum_instance_delete_attempts = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.838982] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metadata_listen = 0.0.0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.839159] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metadata_listen_port = 8775 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.839325] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metadata_workers = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.839482] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] migrate_max_retries = -1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.839644] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] mkisofs_cmd = genisoimage {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.839845] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] my_block_storage_ip = 10.180.1.21 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.839971] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] my_ip = 10.180.1.21 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.840185] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.840343] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] network_allocate_retries = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.840526] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.840694] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] osapi_compute_listen = 0.0.0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.840851] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] osapi_compute_listen_port = 8774 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.841030] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] osapi_compute_unique_server_name_scope = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.841196] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] osapi_compute_workers = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.841354] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] password_length = 12 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.841512] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] periodic_enable = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.841668] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] periodic_fuzzy_delay = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.841833] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] pointer_model = usbtablet {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842045] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] preallocate_images = none {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842155] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] publish_errors = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842286] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] pybasedir = /opt/stack/nova {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842437] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ram_allocation_ratio = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842593] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] rate_limit_burst = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842755] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] rate_limit_except_level = CRITICAL {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.842907] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] rate_limit_interval = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.843073] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reboot_timeout = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.843232] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reclaim_instance_interval = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.843383] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] record = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.843549] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reimage_timeout_per_gb = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.843713] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] report_interval = 120 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.843867] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] rescue_timeout = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844020] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reserved_host_cpus = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844182] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reserved_host_disk_mb = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844335] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reserved_host_memory_mb = 512 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844492] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] reserved_huge_pages = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844647] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] resize_confirm_window = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844802] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] resize_fs_using_block_device = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.844957] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] resume_guests_state_on_host_boot = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.845135] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.845297] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] rpc_response_timeout = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.845454] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] run_external_periodic_tasks = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.845621] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] running_deleted_instance_action = reap {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.845779] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] running_deleted_instance_poll_interval = 1800 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.845933] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] running_deleted_instance_timeout = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.846099] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler_instance_sync_interval = 120 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.846267] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_down_time = 720 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.846433] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] servicegroup_driver = db {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.846589] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] shell_completion = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.846743] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] shelved_offload_time = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.846899] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] shelved_poll_interval = 3600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.847075] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] shutdown_timeout = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.847236] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] source_is_ipv6 = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.847392] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ssl_only = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.847651] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.847820] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] sync_power_state_interval = 600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.847977] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] sync_power_state_pool_size = 1000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.848159] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] syslog_log_facility = LOG_USER {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.848315] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] tempdir = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.848470] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] timeout_nbd = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.848635] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] transport_url = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.848790] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] update_resources_interval = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.848945] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] use_cow_images = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.849111] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] use_journal = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.849267] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] use_json = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.849423] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] use_rootwrap_daemon = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.849578] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] use_stderr = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.849730] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] use_syslog = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.849880] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vcpu_pin_set = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.850052] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plugging_is_fatal = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.850220] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plugging_timeout = 300 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.850379] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] virt_mkfs = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.850544] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] volume_usage_poll_interval = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.850704] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] watch_log_file = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.850868] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] web = /usr/share/spice-html5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 543.851067] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.851236] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.851404] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_brick.wait_mpath_device_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.851573] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_concurrency.disable_process_locking = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.851883] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.852094] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.852241] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.852412] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_metrics.metrics_process_name = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.852580] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.852749] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.852942] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.auth_strategy = keystone {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.853122] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.compute_link_prefix = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.853301] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.853476] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.dhcp_domain = novalocal {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.853646] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.enable_instance_password = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.853807] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.glance_link_prefix = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.853970] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 543.854157] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.instance_list_cells_batch_strategy = distributed {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.instance_list_cells_batch_strategy = distributed {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.854320] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.instance_list_per_project_cells = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.854478] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.list_records_by_skipping_down_cells = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.854639] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.local_metadata_per_cell = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.854804] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.max_limit = 1000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.854967] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.metadata_cache_expiration = 15 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.855158] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.neutron_default_tenant_id = default {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.855330] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.response_validation = warn {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.855498] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.use_neutron_default_nets = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.855665] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.855824] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_dynamic_failure_fatal = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.855986] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.856172] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_dynamic_ssl_certfile = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.856341] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_dynamic_targets = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.856500] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_jsonfile_path = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.856681] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api.vendordata_providers = ['StaticJSON'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.856874] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.backend = dogpile.cache.memcached {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.857049] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.backend_argument = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.857211] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.backend_expiration_time = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.857379] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.config_prefix = cache.oslo {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.857550] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.dead_timeout = 60.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.857714] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.debug_cache_backend = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.857872] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.enable_retry_client = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.858037] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.enable_socket_keepalive = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.858214] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.enabled = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.858375] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.enforce_fips_mode = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.858536] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.expiration_time = 600 
{{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.858711] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.hashclient_retry_attempts = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.858889] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.hashclient_retry_delay = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.859075] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_dead_retry = 300 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.859250] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_password = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.859423] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.859601] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.859774] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_pool_maxsize = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.859948] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_pool_unused_timeout = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.860137] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_sasl_enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.860329] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_servers = ['localhost:11211'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.860511] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_socket_timeout = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.860685] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.memcache_username = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.860859] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.proxies = [] {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.861047] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_db = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.861222] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_password = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.861412] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_sentinel_service_name = mymaster {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.861590] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.861772] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_server = localhost:6379 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.861949] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_socket_timeout = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.862219] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.redis_username = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.862356] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.retry_attempts = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.862484] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.retry_delay = 0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.862661] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.socket_keepalive_count = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.862835] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.socket_keepalive_idle = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.863034] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.socket_keepalive_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.863197] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.tls_allowed_ciphers = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.863368] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.tls_cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.863541] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.tls_certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.863713] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.tls_enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.863883] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cache.tls_keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.864075] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.864265] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.auth_type = password {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.864439] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.864626] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.catalog_info = volumev3::publicURL {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.864797] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.864971] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.865162] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.cross_az_attach = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.865336] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.debug = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.865507] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.endpoint_template = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.865684] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.http_retries = 3 {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.865859] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.866038] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.866226] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.os_region_name = RegionOne {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.866403] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.866573] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cinder.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.866758] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.866927] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.cpu_dedicated_set = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.867110] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.cpu_shared_set = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.867287] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.image_type_exclude_list = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.867454] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.live_migration_wait_for_vif_plug = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.867621] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.max_concurrent_disk_ops = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.867784] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.max_disk_devices_to_attach = -1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.867944] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.868124] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.868287] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.resource_provider_association_refresh = 300 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.868447] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.868606] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.shutdown_retry_interval = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.868783] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.868957] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] conductor.workers = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.869147] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] console.allowed_origins = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.869306] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] console.ssl_ciphers = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.869474] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] console.ssl_minimum_version = default {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.869643] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] consoleauth.enforce_session_timeout = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.869807] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] consoleauth.token_ttl = 600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.869979] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.870148] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.certfile = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.870311] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.870482] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.connect_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.870637] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.870791] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.870948] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.871836] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.871836] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.871836] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.871836] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.871836] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872308] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872308] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.service_type = accelerator {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872308] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872455] env[70020]: DEBUG oslo_service.backend.eventlet.service 
[None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872662] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872762] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.872888] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.873074] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] cyborg.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.873257] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.asyncio_connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.873420] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.asyncio_slave_connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.873616] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.backend = sqlalchemy {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.873736] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.873900] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.connection_debug = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.874086] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.connection_parameters = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.874250] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.connection_recycle_time = 3600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.874414] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.connection_trace = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.874573] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.db_inc_retry_interval = 
True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.874735] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.db_max_retries = 20 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.874897] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.db_max_retry_interval = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.875074] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.db_retry_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.875239] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.max_overflow = 50 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.875398] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.max_pool_size = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.875557] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.max_retries = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.875732] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.mysql_sql_mode = TRADITIONAL {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.875886] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.mysql_wsrep_sync_wait = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.876052] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.pool_timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.876220] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.retry_interval = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.876377] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.slave_connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.876536] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.sqlite_synchronous = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.876697] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] database.use_db_reconnect = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
543.876861] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.asyncio_connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.877022] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.asyncio_slave_connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.877195] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.backend = sqlalchemy {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.877372] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.877525] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.connection_debug = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.877685] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.connection_parameters = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.877844] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.connection_recycle_time = 3600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878011] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.connection_trace = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878195] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.db_inc_retry_interval = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878338] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.db_max_retries = 20 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878501] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.db_max_retry_interval = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878653] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.db_retry_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878812] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.max_overflow = 50 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.878970] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.max_pool_size = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.879142] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.max_retries = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.879310] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.879468] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.mysql_wsrep_sync_wait = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.879624] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.pool_timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.879784] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.retry_interval = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.879940] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.slave_connection = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.880113] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] api_database.sqlite_synchronous = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.880288] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] devices.enabled_mdev_types = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.880474] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.880640] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ephemeral_storage_encryption.default_format = luks {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.880799] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ephemeral_storage_encryption.enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.880960] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ephemeral_storage_encryption.key_size = 512 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.881144] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.api_servers = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.881305] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.881472] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.881622] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.881778] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.connect_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.881931] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.882100] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.debug = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.882265] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.default_trusted_certificate_ids = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.882424] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.enable_certificate_validation = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.882584] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.enable_rbd_download = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.882796] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.882901] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.883056] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.883217] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.max_version = None {{(pid=70020) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.883369] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.883584] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.num_retries = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.883692] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.rbd_ceph_conf = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.883853] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.rbd_connect_timeout = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884043] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.rbd_pool = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884191] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.rbd_user = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884352] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884507] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884663] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884827] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.service_type = image {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.884984] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.885153] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.885308] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.885464] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.885643] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.885804] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.verify_glance_signatures = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.885958] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] glance.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.886137] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] guestfs.debug = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.886303] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.886465] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.auth_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.886622] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.886777] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.886937] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.887105] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.connect_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.887265] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.887421] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.887584] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.insecure = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.887737] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.887889] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.888055] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.888213] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.888368] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.888526] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.888692] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.service_type = shared-file-system {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.888852] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.share_apply_policy_timeout = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.889039] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.889187] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.889344] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.889519] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.889675] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.889835] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] manila.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.890008] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] mks.enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.890388] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.890584] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] image_cache.manager_interval = 2400 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.890744] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] image_cache.precache_concurrency = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.890909] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] image_cache.remove_unused_base_images = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.891091] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.891258] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.891432] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] image_cache.subdirectory_name = _base {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.891606] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.api_max_retries = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.891766] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.api_retry_interval = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.891915] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.892087] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.auth_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.892243] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.892398] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.892557] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.892716] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.conductor_group = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.892927] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.connect_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893030] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893185] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893342] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893492] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893647] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893798] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.893956] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.peer_list = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.894122] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.894276] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.retriable_status_codes = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.894433] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.serial_console_state_timeout = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.894588] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.894751] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.service_type = baremetal {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.894904] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.shard = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ironic.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] key_manager.fixed_key = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.barbican_api_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.barbican_endpoint = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.896845] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.barbican_endpoint_type = public {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897061] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.barbican_region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897157] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897243] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897395] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897554] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897708] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.897865] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.number_of_retries = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898033] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.retry_delay = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898190] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.send_service_user_token = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898349] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898503] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898663] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.verify_ssl = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898815] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican.verify_ssl_path = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.898977] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.899148] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.auth_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.899301] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.899453] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.899612] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.899767] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.899919] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.900087] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.900244] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] barbican_service_user.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.900408] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.approle_role_id = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.900580] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.approle_secret_id = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.900730] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.kv_mountpoint = secret {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.900883] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.kv_path = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.901060] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.kv_version = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.901222] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.namespace = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.901376] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.root_token_id = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.901533] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.ssl_ca_crt_file = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.901700] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.timeout = 60.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.901859] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.use_ssl = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.902028] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.902202] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.902359] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.902519] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.902677] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.connect_retries = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.902832] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903072] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903155] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903301] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903454] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903609] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903765] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.903922] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.904089] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.904257] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.service_type = identity {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.904416] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.904569] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.904724] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.904874] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.905055] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.905218] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] keystone.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.905410] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.ceph_mount_options = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.905861] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.906062] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.connection_uri = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.906236] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_mode = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.906408] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_model_extra_flags = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.906576] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_models = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.906753] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_power_governor_high = performance {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.906921] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_power_governor_low = powersave {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.907097] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_power_management = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.907270] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.907439] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.device_detach_attempts = 8 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.907606] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.device_detach_timeout = 20 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.907773] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.disk_cachemodes = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.907934] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.disk_prefix = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.908114] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.enabled_perf_events = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.908280] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.file_backed_memory = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.908446] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.gid_maps = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.908606] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.hw_disk_discard = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.908763] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.hw_machine_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.908930] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_rbd_ceph_conf = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.909103] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.909266] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.909431] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_rbd_glance_store_name = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.909597] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_rbd_pool = rbd 
{{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.909761] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_type = default {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.909924] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.images_volume_group = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.910084] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.inject_key = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.910247] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.inject_partition = -2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.910405] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.inject_password = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.910566] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.iscsi_iface = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.910724] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.iser_use_multipath = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.910881] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_bandwidth = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911048] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_completion_timeout = 800 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911212] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_downtime = 500 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911367] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_downtime_delay = 75 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911526] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_downtime_steps = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911683] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_inbound_addr = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911837] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_permit_auto_converge = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.911994] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_permit_post_copy = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.912163] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_scheme = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.912331] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_timeout_action = abort {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.912493] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_tunnelled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.912650] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_uri = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.912808] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.live_migration_with_native_tls = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.912960] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.max_queues = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.913204] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.mem_stats_period_seconds = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.913360] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.913521] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.nfs_mount_options = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.913818] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.913988] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.num_aoe_discover_tries = 3 {{(pid=70020) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.914167] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.num_iser_scan_tries = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.914326] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.num_memory_encrypted_guests = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.914486] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.num_nvme_discover_tries = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.914647] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.num_pcie_ports = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.914808] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.num_volume_scan_tries = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.914966] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.pmem_namespaces = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.915136] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.quobyte_client_cfg = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.915426] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.915600] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rbd_connect_timeout = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.915761] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.915924] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.916112] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rbd_secret_uuid = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.916253] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rbd_user = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.916413] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.realtime_scheduler_priority = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.916589] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.remote_filesystem_transport = ssh {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.916751] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rescue_image_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.916906] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rescue_kernel_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.917076] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rescue_ramdisk_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.917251] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rng_dev_path = /dev/urandom {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.917412] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.rx_queue_size = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.917581] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.smbfs_mount_options = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.917886] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.918074] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.snapshot_compression = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.918240] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.snapshot_image_format = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.918475] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.918638] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.sparse_logical_volumes = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.918798] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.swtpm_enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.918963] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.swtpm_group = tss {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.919144] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.swtpm_user = tss {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.919314] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.sysinfo_serial = unique {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.919471] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.tb_cache_size = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.919629] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.tx_queue_size = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.919793] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.uid_maps = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.919952] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.use_virtio_for_bridges = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.920132] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.virt_type = kvm {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.920299] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.volume_clear = zero {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.920464] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.volume_clear_size = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.920625] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.volume_enforce_multipath = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.920789] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.volume_use_multipath = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.920945] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_cache_path = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.921127] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.921297] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_mount_group = qemu {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.921459] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_mount_opts = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.921627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.921916] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.922107] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.vzstorage_mount_user = stack {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.922276] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.922449] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.922625] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.auth_type = password {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.922782] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.922938] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.923115] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.923337] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.connect_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.923416] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.923588] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.default_floating_pool = public {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.923745] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.923904] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.extension_sync_interval = 600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.924076] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.http_retries = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.924245] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.924404] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.924562] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.924731] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.metadata_proxy_shared_secret = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.924886] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.925062] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.ovs_bridge = br-int {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.925234] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.physnets = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.925400] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.region_name = RegionOne 
{{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.925555] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.925723] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.service_metadata_proxy = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.925880] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.926061] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.service_type = network {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.926228] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.926385] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.926542] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.926698] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.926877] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.927044] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] neutron.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.927220] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] notifications.bdms_in_notifications = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.927394] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] notifications.default_level = INFO {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.927555] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] notifications.include_share_mapping = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.927736] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] notifications.notification_format = unversioned {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.927900] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] notifications.notify_on_state_change = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.928084] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.928261] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] pci.alias = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.928430] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] pci.device_spec = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.928594] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] pci.report_in_placement = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.928763] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.928933] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.auth_type = password {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.929114] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.auth_url = http://10.180.1.21/identity {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.929275] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.929439] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.929590] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.929749] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.connect_retries = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.929905] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.930071] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.default_domain_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.930229] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.default_domain_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.930384] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.domain_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.930543] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.domain_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.930698] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.endpoint_override = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.930856] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931021] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931179] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931333] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931497] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.password = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931654] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.project_domain_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931815] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.project_domain_name = Default {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.931977] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.project_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.932164] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.project_name = service {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.932331] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.region_name = RegionOne {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.932492] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.932653] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.932821] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.service_type = placement {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.932982] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.933151] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.status_code_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.933310] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.933481] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.system_scope = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.933621] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.933778] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.trust_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.933928] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.user_domain_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.934106] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] 
placement.user_domain_name = Default {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.934265] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.user_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.934446] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.username = nova {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.934632] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.934791] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] placement.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.934970] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.cores = 20 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.935171] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.count_usage_from_placement = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.935342] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.935509] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.injected_file_content_bytes = 10240 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.935673] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.injected_file_path_length = 255 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.935834] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.injected_files = 5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.935993] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.instances = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.936170] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.key_pairs = 100 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.936329] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.metadata_items = 128 {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.936487] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.ram = 51200 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.936648] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.recheck_quota = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.936808] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.server_group_members = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.936967] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.server_groups = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.937186] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.unified_limits_resource_list = ['servers'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.937357] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] quota.unified_limits_resource_strategy = require {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.937526] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.937689] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.937847] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.image_metadata_prefilter = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938012] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938175] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.max_attempts = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938335] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.max_placement_results = 1000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938492] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938650] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.query_placement_for_image_type_support = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938803] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.938971] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] scheduler.workers = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.939153] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.939322] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.939514] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.939666] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.939826] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.939985] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.940161] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.940345] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.940509] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] 
filter_scheduler.host_subset_size = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.940673] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.940826] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.image_properties_default_architecture = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.940983] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.941156] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.image_props_weight_setting = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.941330] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.941493] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.isolated_hosts = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.941653] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.isolated_images = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.941809] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.max_instances_per_host = 50 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.941961] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.942134] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.942293] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.pci_in_placement = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.942452] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.942612] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.942770] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.942925] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.943095] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.943259] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.943419] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.track_instance_changes = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.943613] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.943772] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metrics.required = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.944061] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metrics.weight_multiplier = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.944256] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metrics.weight_of_unavailable = -10000.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.944426] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] metrics.weight_setting = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.944754] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.944928] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] serial_console.enabled = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.945121] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] serial_console.port_range = 10000:20000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.945295] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.945465] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.945635] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] serial_console.serialproxy_port = 6083 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.945801] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.945972] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.auth_type = password {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.946145] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.946300] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.946461] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.946620] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.946772] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.946937] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.send_service_user_token = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.947108] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.split_loggers = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.947266] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] service_user.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.947435] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.agent_enabled = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.947595] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.947912] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.948136] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.html5proxy_host = 0.0.0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.948306] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.html5proxy_port = 6082 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.948467] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.image_compression = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.948629] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.jpeg_compression = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.948786] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.playback_compression = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.948949] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.require_secure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.949134] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.server_listen = 127.0.0.1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.949307] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.949589] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.949761] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.streaming_mode = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.949921] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] spice.zlib_compression = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.950101] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] upgrade_levels.baseapi = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.950273] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] upgrade_levels.compute = auto {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.950429] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] upgrade_levels.conductor = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.950584] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] upgrade_levels.scheduler = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.950750] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.950909] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.auth_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.951075] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.951234] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.951393] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.951550] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.951704] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.keyfile = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.951861] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.952034] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vendordata_dynamic_auth.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.952198] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.api_retry_count = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.952355] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.ca_file = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.952523] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.cache_prefix = devstack-image-cache {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.952689] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.cluster_name = testcl1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.952851] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.connection_pool_size = 10 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.953012] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.console_delay_seconds = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.953188] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.datastore_regex = ^datastore.* {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.953408] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.953581] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.host_password = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.953779] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.host_port = 443 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.953907] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.host_username = administrator@vsphere.local {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.954084] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.insecure = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.954247] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.integration_bridge = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.954408] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.maximum_objects = 100 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.954575] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.pbm_default_policy = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.954731] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.pbm_enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.954887] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.pbm_wsdl_location = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.955065] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.955231] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.serial_port_proxy_uri = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.955388] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.serial_port_service_uri = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.955557] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.task_poll_interval = 0.5 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.955728] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.use_linked_clone = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.955893] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.vnc_keymap = en-us {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.956067] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.vnc_port = 5900 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.956232] env[70020]: DEBUG 
oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vmware.vnc_port_total = 10000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.956418] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.auth_schemes = ['none'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.956589] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.956885] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.957079] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.957252] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.novncproxy_port = 6080 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.957439] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.server_listen = 127.0.0.1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.957625] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.957780] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.vencrypt_ca_certs = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.957936] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.vencrypt_client_cert = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.958106] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vnc.vencrypt_client_key = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.958287] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.958450] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.disable_deep_image_inspection = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.958609] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.disable_fallback_pcpu_query = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.958767] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.disable_group_policy_check_upcall = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.958927] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.959098] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.disable_rootwrap = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.959264] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.enable_numa_live_migration = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.959422] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.959583] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960397] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.handle_virt_lifecycle_events = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960397] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.libvirt_disable_apic = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960397] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.never_download_image_if_on_rbd = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960397] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960598] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960726] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.960892] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.961067] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.961235] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.961399] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.961559] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.961728] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.961910] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.962090] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.client_socket_timeout = 900 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.962257] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.default_pool_size = 1000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.962419] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.keep_alive = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.962583] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.max_header_line = 16384 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.962739] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.secure_proxy_ssl_header = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.962894] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.ssl_ca_file = None 
{{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.963059] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.ssl_cert_file = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.963219] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.ssl_key_file = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.963378] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.tcp_keepidle = 600 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.963551] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.963713] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] zvm.ca_file = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.963885] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] zvm.cloud_connector_url = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.964208] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.964386] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] zvm.reachable_timeout = 300 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.964559] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.964738] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.964916] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.connection_string = messaging:// {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.965095] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.enabled = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.965270] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] 
profiler.es_doc_type = notification {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.965434] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.es_scroll_size = 10000 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.965605] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.es_scroll_time = 2m {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.965768] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.filter_error_trace = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.965936] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.hmac_keys = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.966116] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.sentinel_service_name = mymaster {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.966289] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.socket_timeout = 0.1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.966454] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.trace_requests = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.966617] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler.trace_sqlalchemy = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.966799] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler_jaeger.process_tags = {} {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.966960] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler_jaeger.service_name_prefix = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.967136] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] profiler_otlp.service_name_prefix = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.967305] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] remote_debug.host = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.967466] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] remote_debug.port = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.967640] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.967802] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.967963] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.968139] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.968304] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.968467] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.968627] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.968786] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.968945] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.969132] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.hostname = devstack {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.969293] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.969465] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.969632] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.969795] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.969959] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.970142] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.970304] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.970487] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.970655] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.970816] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.970975] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.971155] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.971329] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.971498] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.971660] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=70020) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.971820] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.971979] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.972153] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.972314] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.972476] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.972639] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.ssl = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.972815] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.972977] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.973154] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.973323] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.973490] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.ssl_version = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.973654] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.973843] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.974011] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_notifications.retry = -1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.974198] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.974372] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_messaging_notifications.transport_url = **** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.974545] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.auth_section = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.974709] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.auth_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.974868] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.cafile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.975033] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.certfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.975201] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.collect_timing = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.975360] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.connect_retries = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.975521] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.connect_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.975678] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.endpoint_id = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.975851] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.endpoint_interface = publicURL {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976024] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.endpoint_override = 
None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976175] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.endpoint_region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976334] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.endpoint_service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976488] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.endpoint_service_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976651] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.insecure = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976808] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.keyfile = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.976963] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.max_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.977134] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.min_version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.977293] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.region_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.977451] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.retriable_status_codes = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.977608] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.service_name = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.977765] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.service_type = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.977925] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.split_loggers = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.978092] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.status_code_retries = None {{(pid=70020) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.978253] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.status_code_retry_delay = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.978411] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.timeout = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.978568] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.valid_interfaces = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.978725] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_limit.version = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.978888] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_reports.file_event_handler = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.979061] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_reports.file_event_handler_interval = 1 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.979222] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] oslo_reports.log_dir = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.979393] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.979552] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_linux_bridge_privileged.group = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.979711] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.979879] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.980051] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.980216] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.980384] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.980546] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_ovs_privileged.group = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.980703] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_ovs_privileged.helper_command = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.980866] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.981035] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.981196] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] vif_plug_ovs_privileged.user = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.981365] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.flat_interface = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.981545] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.981719] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.981888] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.982071] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.982249] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.982415] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.982577] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_linux_bridge.vlan_interface = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.982755] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.982927] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.isolate_vif = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.983104] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.983271] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.983436] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.983607] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.ovsdb_interface = native {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.983766] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] os_vif_ovs.per_port_bridge = False {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.983934] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] privsep_osbrick.capabilities = [21] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.984108] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] privsep_osbrick.group = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.984268] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] privsep_osbrick.helper_command = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.984433] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.984599] env[70020]: DEBUG oslo_service.backend.eventlet.service [None 
req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] privsep_osbrick.thread_pool_size = 8 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.984756] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] privsep_osbrick.user = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.984926] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.985097] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] nova_sys_admin.group = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.985259] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] nova_sys_admin.helper_command = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.985426] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.985589] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] nova_sys_admin.thread_pool_size = 8 {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.985747] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] nova_sys_admin.user = None {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 543.985880] env[70020]: DEBUG oslo_service.backend.eventlet.service [None req-441da7fc-78e8-4faa-8061-399bd571d7bc None None] ******************************************************************************** {{(pid=70020) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 543.986329] env[70020]: INFO nova.service [-] Starting compute node (version 31.0.1) [ 544.490207] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Getting list of instances from cluster (obj){ [ 544.490207] env[70020]: value = "domain-c8" [ 544.490207] env[70020]: _type = "ClusterComputeResource" [ 544.490207] env[70020]: } {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 544.491882] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067f5232-22a1-435b-8f8e-9ef6ba0e6247 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.504582] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Got total of 0 instances {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 544.505443] env[70020]: WARNING nova.virt.vmwareapi.driver [None req-8899e433-bc13-4138-b46a-40c207433558 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 544.506115] env[70020]: INFO nova.virt.node [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Generated node identity ee72c483-d9d9-4e62-8f73-e9f24668500d [ 544.506442] env[70020]: INFO nova.virt.node [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Wrote node identity ee72c483-d9d9-4e62-8f73-e9f24668500d to /opt/stack/data/n-cpu-1/compute_id [ 545.009666] env[70020]: WARNING nova.compute.manager [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Compute nodes ['ee72c483-d9d9-4e62-8f73-e9f24668500d'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 546.014923] env[70020]: INFO nova.compute.manager [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 547.021321] env[70020]: WARNING nova.compute.manager [None req-8899e433-bc13-4138-b46a-40c207433558 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 547.021652] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 547.021799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 547.021951] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 547.022137] env[70020]: DEBUG nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 547.023168] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008a5f10-be66-4354-93ea-2c608096e072 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.032367] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f965a2ac-c31b-4e38-8843-c897fc8bbdc8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.047962] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e153ba86-e747-4927-a01e-c344bcbb41cc {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.054526] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c49d8bb-c4d8-4228-9207-08eacf56b234 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.083487] env[70020]: DEBUG nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180937MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 547.083626] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 547.083831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 547.587662] env[70020]: WARNING nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] No compute node record for cpu-1:ee72c483-d9d9-4e62-8f73-e9f24668500d: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host ee72c483-d9d9-4e62-8f73-e9f24668500d could not be found. [ 548.090638] env[70020]: INFO nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: ee72c483-d9d9-4e62-8f73-e9f24668500d [ 549.599794] env[70020]: DEBUG nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 549.599794] env[70020]: DEBUG nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 549.761092] env[70020]: INFO nova.scheduler.client.report [None req-8899e433-bc13-4138-b46a-40c207433558 None None] [req-ac178496-13c8-4e79-b34b-8fb78807f4e7] Created resource provider record via placement API for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
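The resource tracker and placement records around this point report, for provider ee72c483-d9d9-4e62-8f73-e9f24668500d, a total, a reservation and an allocation_ratio per resource class. A minimal Python sketch of the capacity arithmetic, assuming the usual placement formula capacity = (total - reserved) * allocation_ratio (the formula itself is not printed in this log; the values below are taken from the inventory records in this trace):

    # Hypothetical illustration: derive schedulable capacity from the inventory
    # values logged for provider ee72c483-d9d9-4e62-8f73-e9f24668500d.
    # Assumes the standard placement formula (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # -> VCPU 192, MEMORY_MB 196078, DISK_GB 400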
[ 549.778018] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b17319-6aa5-47b1-96f9-063210140466 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.785515] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0d884e-ed0e-4049-af37-ae96de98f7a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.818029] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aeb1608-5bd1-4f44-af1c-8b87e9ee69a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.825103] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7932944c-9e83-4978-9e54-83eac68eed04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.838624] env[70020]: DEBUG nova.compute.provider_tree [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.372818] env[70020]: DEBUG nova.scheduler.client.report [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 550.373064] env[70020]: DEBUG nova.compute.provider_tree [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 0 to 1 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 550.373205] env[70020]: DEBUG nova.compute.provider_tree [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.424067] env[70020]: DEBUG nova.compute.provider_tree [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Updating resource 
provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 1 to 2 during operation: update_traits {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 550.930290] env[70020]: DEBUG nova.compute.resource_tracker [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 550.930687] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.847s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 550.930687] env[70020]: DEBUG nova.service [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Creating RPC server for service compute {{(pid=70020) start /opt/stack/nova/nova/service.py:186}} [ 550.945144] env[70020]: DEBUG nova.service [None req-8899e433-bc13-4138-b46a-40c207433558 None None] Join ServiceGroup membership for this service compute {{(pid=70020) start /opt/stack/nova/nova/service.py:203}} [ 550.945393] env[70020]: DEBUG nova.servicegroup.drivers.db [None req-8899e433-bc13-4138-b46a-40c207433558 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=70020) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 550.946039] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_power_states {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.449101] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Getting list of instances from cluster (obj){ [ 551.449101] env[70020]: value = "domain-c8" [ 551.449101] env[70020]: _type = "ClusterComputeResource" [ 551.449101] env[70020]: } {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 551.450308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f224b322-d568-44d2-a7e0-0546e9e7f63c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.459403] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Got total of 0 instances {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 551.459623] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 551.459908] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Getting list of instances from cluster (obj){ [ 551.459908] env[70020]: value = "domain-c8" [ 551.459908] env[70020]: _type = "ClusterComputeResource" [ 551.459908] env[70020]: } {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 551.460801] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e665520-a16a-4278-808c-0683ee3e6923 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.468278] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Got total of 0 instances {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 591.182279] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "01773af2-4ce2-4d2a-b334-ab99348000a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.182581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.553895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "0cc49db6-1574-4e51-8692-b79ee14bc25d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.553895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "0cc49db6-1574-4e51-8692-b79ee14bc25d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.685620] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 591.799186] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.799542] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.056124] env[70020]: DEBUG nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.231845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.231845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.235022] env[70020]: INFO nova.compute.claims [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.301939] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.588665] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.834490] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.067323] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "516341a3-2230-4340-a1e0-ff97bb7a608d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.067323] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.345040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af9a72b-fa50-40b1-9262-bed344c85c25 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.357612] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c235a234-95dd-4af0-a2ad-b51c83a7c6de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.399034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de87f9ad-5ad5-40d9-bda7-1604ac6b4a2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.408178] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4fee20-7cae-497a-a5b0-acdef98fa2b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.426407] env[70020]: DEBUG nova.compute.provider_tree [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.570123] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 
tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 593.930721] env[70020]: DEBUG nova.scheduler.client.report [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 594.095596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.437834] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 594.437834] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 594.442163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.853s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.446515] env[70020]: INFO nova.compute.claims [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.948508] env[70020]: DEBUG nova.compute.utils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 594.949966] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 594.954032] env[70020]: DEBUG nova.network.neutron [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 595.464320] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 595.504289] env[70020]: DEBUG nova.policy [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c4fc1ca2cbd45ccb3f40cdf2d8ccc52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aac90c2646b14eb5abe9caa687968c63', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 595.580712] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bbc331-a6fd-4296-a063-46c594c724c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.592534] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b78bd42-19fc-481f-bc1b-a430a623e867 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.631104] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783cc48c-4475-46cb-a1d3-b2043438ebd4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.639570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c49d614-22fc-403a-8ce0-380a1815c3a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.654092] env[70020]: DEBUG nova.compute.provider_tree [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.158481] env[70020]: DEBUG nova.scheduler.client.report [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 596.472657] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.472657] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.480927] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 596.519767] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 596.520187] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.520187] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 596.520394] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.520840] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 596.520840] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 596.520945] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 596.521129] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 596.521584] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 596.521817] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 596.521921] env[70020]: DEBUG nova.virt.hardware [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 596.523552] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bd76a8-760b-4988-8088-071d869a8277 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.535144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940bbdbc-c65d-4105-a1a2-f249601fdf74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.552535] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5917ab0-0589-4b1a-9b4f-47d9bb0ad930 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.577079] env[70020]: DEBUG nova.network.neutron [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Successfully created port: eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.668638] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.227s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.669257] env[70020]: DEBUG nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 596.675252] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.841s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.680089] env[70020]: INFO nova.compute.claims [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.978350] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 597.185171] env[70020]: DEBUG nova.compute.utils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 597.189620] env[70020]: DEBUG nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 597.514094] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.691708] env[70020]: DEBUG nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Start building block device mappings for instance. 
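
The nova.virt.hardware lines a few entries above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") amount to enumerating every sockets x cores x threads factorization of the flavor's vCPU count within the limits, which are effectively unbounded here (65536 each). A rough, self-contained sketch of that enumeration, not the exact implementation in nova/virt/hardware.py:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorizations of the vCPU count."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # The m1.nano flavor has vcpus=1, so exactly one candidate comes back,
    # matching "Got 1 possible topologies" in the log.
    print(possible_topologies(1))
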
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 597.824754] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e3f754-1b76-464d-a11c-fc18d1010fbc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.833517] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fe427f-fcb1-4e93-818c-994ac5249a75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.870706] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e9af31-7bb8-4f5f-81f3-2230ac5fc3dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.876346] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddaab7e4-ff01-408f-b5fa-ee572a3f4d0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.891781] env[70020]: DEBUG nova.compute.provider_tree [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.395848] env[70020]: DEBUG nova.scheduler.client.report [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 598.645485] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.645891] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.707482] env[70020]: DEBUG nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 598.745879] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 598.746121] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.746270] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 598.746443] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.746891] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 598.746891] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 598.747013] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 598.747696] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
598.747931] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 598.748291] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 598.748382] env[70020]: DEBUG nova.virt.hardware [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 598.751297] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eec0bac-041a-4888-80f6-89f354800f7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.763080] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ade008b-6186-439b-819b-b41d33209018 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.788047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.803681] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.804363] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7eada08f-8301-4f14-8bb9-ee26bfb0cf68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.817572] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Created folder: OpenStack in parent group-v4. [ 598.817794] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Creating folder: Project (f20f623eab2f4d43bb5a7ef246e7c7d1). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.818063] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a77c2c7-6ea3-4694-95c9-d6f993af5428 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.829695] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Created folder: Project (f20f623eab2f4d43bb5a7ef246e7c7d1) in parent group-v721521. [ 598.830341] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Creating folder: Instances. Parent ref: group-v721522. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.830341] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f2295c2-6ea8-4dcb-8ccb-f8a7fb9620d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.841948] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Created folder: Instances in parent group-v721522. [ 598.841948] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 598.842049] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 598.842380] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-751487a9-ba8f-4973-b5b8-9d2a346481b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.859713] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.859713] env[70020]: value = "task-3617612" [ 598.859713] env[70020]: _type = "Task" [ 598.859713] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.870961] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617612, 'name': CreateVM_Task} progress is 0%. 
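
CreateVM_Task above is asynchronous on the vCenter side: the driver gets back a task reference (task-3617612) and oslo.vmware polls its state until it finishes, which is what the "progress is 0%" and later "completed successfully" lines record. A hedged, library-free sketch of that poll loop; the real wait_for_task() in oslo.vmware is built on a looping call with richer error handling, and the .state/.progress/.error attributes below are assumptions for illustration.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """get_task_info is assumed to return an object with .state, .progress and .error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()                     # one property-collector round trip
            if info.state == 'success':
                return info                            # "completed successfully" in the log
            if info.state == 'error':
                raise RuntimeError(f'task failed: {info.error}')
            time.sleep(poll_interval)                  # otherwise log progress and retry
        raise TimeoutError('task did not complete within %.0fs' % timeout)
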
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.903831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.907294] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.914163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.819s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.916427] env[70020]: INFO nova.compute.claims [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.146395] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.146640] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.146832] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.147017] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.147210] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.147389] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.147567] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic 
task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.147755] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 599.148069] env[70020]: DEBUG nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 599.154216] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.377522] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617612, 'name': CreateVM_Task, 'duration_secs': 0.472303} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.377522] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 599.378469] env[70020]: DEBUG oslo_vmware.service [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dec207-2c25-43a6-be6f-9786738ae9a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.385933] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.386061] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.386907] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 599.387029] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e7ae2d8-714a-442c-ba27-2090e7c3b13c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.392958] env[70020]: DEBUG oslo_vmware.api [None 
req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 599.392958] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526ee889-fc1d-1781-82ea-3960435b1021" [ 599.392958] env[70020]: _type = "Task" [ 599.392958] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.403951] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526ee889-fc1d-1781-82ea-3960435b1021, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.428653] env[70020]: DEBUG nova.compute.utils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 599.432196] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 599.432454] env[70020]: DEBUG nova.network.neutron [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.649833] env[70020]: DEBUG nova.policy [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b67375d5e85b4ba99d47120945bbf0f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd193f3ca7403a986d72f072590f4f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 599.664713] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.685846] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.904172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 
tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "b0b825d4-534d-4d54-a0c4-b9e507726c47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.904404] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "b0b825d4-534d-4d54-a0c4-b9e507726c47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.915696] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.915940] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 599.916184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.916320] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.916715] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.916977] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b897e35-baea-49bd-a10d-0bf57b835a6b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.933047] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.941865] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.942202] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 599.950342] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15133e0-14d9-405b-ba0c-0cedb9fc304e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.968825] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1b734f3-a494-48c3-9c7d-ee6dd1089dac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.976712] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 599.976712] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52105720-5071-7dfb-1e31-a3c59e66d5a0" [ 599.976712] env[70020]: _type = "Task" [ 599.976712] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.995117] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52105720-5071-7dfb-1e31-a3c59e66d5a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.130978] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2384add3-7654-4033-937e-e4e687310dd2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.143031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b20a923-c0e5-46df-a6e1-a9741e11a24b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.193890] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2416a9-7110-440e-ad3f-3825faeca100 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.203026] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d30b48f-bad6-4f65-8d24-d21d265c0799 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.217998] env[70020]: DEBUG nova.compute.provider_tree [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.379880] env[70020]: DEBUG nova.network.neutron [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Successfully updated port: eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.418129] env[70020]: DEBUG nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Starting instance... 
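
The repeated "Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d" lines show the scheduler report client diffing the locally generated inventory against its cached copy and skipping the Placement update when nothing differs. A trivial sketch of that guard; put_inventory here is a stand-in for the real HTTP call to the Placement API.

    def sync_provider_inventory(cached, generated, put_inventory):
        """cached/generated look like {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, ...}."""
        if cached == generated:
            return cached            # "Inventory has not changed" -> no PUT issued
        put_inventory(generated)     # only update Placement when something differs
        return generated
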
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 600.498668] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 600.498947] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Creating directory with path [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.499242] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4ed749e-22c5-46b0-8789-93fd988a707e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.521862] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Created directory with path [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.522085] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Fetch image to [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 600.522253] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Downloading image file data c9cd83bf-fd12-4173-a067-f57d38f23556 to [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk on the data store datastore2 {{(pid=70020) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 600.523202] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62529ec6-9b5e-4c36-8d55-caa96c529ccf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.531262] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9939e7a9-52cc-4d74-85eb-af258667d3ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.542054] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c9c40c-3e18-4749-b195-79ad2fb538fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.580598] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8b4f6d73-7ec3-4ee4-aba6-bfb69b3fc3ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.586373] env[70020]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4b0c535e-73ef-4df7-ad86-f0c36dd83c51 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.681116] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Downloading image file data c9cd83bf-fd12-4173-a067-f57d38f23556 to the data store datastore2 {{(pid=70020) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 600.722051] env[70020]: DEBUG nova.scheduler.client.report [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 600.779997] env[70020]: DEBUG oslo_vmware.rw_handles [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
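
The entries above trace the VMDK image-cache path: the driver looks for image c9cd83bf-fd12-4173-a067-f57d38f23556 under devstack-image-cache_base (SearchDatastore_Task), misses, streams the image data over HTTPS into a vmware_temp directory as tmp-sparse.vmdk, and then copies it into the cache for reuse. An outline of that flow with the vSphere calls abstracted behind callables passed in by the caller; this is an assumption-level sketch, not the driver's literal code.

    def ensure_image_cached(image_id, cache_dir, tmp_dir,
                            exists, fetch_to_datastore, copy_virtual_disk):
        """exists/fetch_to_datastore/copy_virtual_disk stand in for
        SearchDatastore_Task, the HTTPS write handle and CopyVirtualDisk_Task."""
        cached = f'{cache_dir}/{image_id}/{image_id}.vmdk'
        if exists(cached):
            return cached                            # cache hit: reuse the cached VMDK
        sparse_tmp = f'{tmp_dir}/{image_id}/tmp-sparse.vmdk'
        fetch_to_datastore(image_id, sparse_tmp)     # "Downloading image file data ..."
        copy_virtual_disk(sparse_tmp, cached)        # populate the cache for later boots
        return cached
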
{{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 600.857630] env[70020]: DEBUG nova.compute.manager [req-058356d3-87d4-4b3c-9397-449fba4cbc08 req-a3fdd8db-4e82-45df-a7b2-add8faf9c3e5 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Received event network-vif-plugged-eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 600.857630] env[70020]: DEBUG oslo_concurrency.lockutils [req-058356d3-87d4-4b3c-9397-449fba4cbc08 req-a3fdd8db-4e82-45df-a7b2-add8faf9c3e5 service nova] Acquiring lock "01773af2-4ce2-4d2a-b334-ab99348000a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.857630] env[70020]: DEBUG oslo_concurrency.lockutils [req-058356d3-87d4-4b3c-9397-449fba4cbc08 req-a3fdd8db-4e82-45df-a7b2-add8faf9c3e5 service nova] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.857810] env[70020]: DEBUG oslo_concurrency.lockutils [req-058356d3-87d4-4b3c-9397-449fba4cbc08 req-a3fdd8db-4e82-45df-a7b2-add8faf9c3e5 service nova] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.857810] env[70020]: DEBUG nova.compute.manager [req-058356d3-87d4-4b3c-9397-449fba4cbc08 req-a3fdd8db-4e82-45df-a7b2-add8faf9c3e5 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] No waiting events found dispatching network-vif-plugged-eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 600.858030] env[70020]: WARNING nova.compute.manager [req-058356d3-87d4-4b3c-9397-449fba4cbc08 req-a3fdd8db-4e82-45df-a7b2-add8faf9c3e5 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Received unexpected event network-vif-plugged-eb757fe7-6cda-466e-9979-29e56b057f1c for instance with vm_state building and task_state spawning. 
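
The WARNING just above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") is the benign race where Neutron's vif-plugged notification arrives before the compute manager has registered a waiter for it. A small sketch of the register-then-dispatch pattern involved, using plain threading as an assumption; Nova's own event plumbing is eventlet-based.

    import threading

    class InstanceEvents:
        """Map (instance_uuid, event_name) to a waiter that spawn() can block on."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                waiter = threading.Event()
                self._waiters[(instance_uuid, event_name)] = waiter
                return waiter

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                # Nothing registered yet -> the "Received unexpected event" WARNING case.
                print(f'unexpected event {event_name} for {instance_uuid}')
            else:
                waiter.set()         # unblock whoever called waiter.wait(timeout=...)
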
[ 600.889512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "refresh_cache-01773af2-4ce2-4d2a-b334-ab99348000a5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.889749] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquired lock "refresh_cache-01773af2-4ce2-4d2a-b334-ab99348000a5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.889929] env[70020]: DEBUG nova.network.neutron [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.956549] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.959459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.991695] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.992236] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.992544] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 600.992932] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.993288] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.993697] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.994026] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.994306] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 600.994572] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.994836] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.995119] env[70020]: DEBUG nova.virt.hardware [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.996098] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f757700-8f35-455e-81c4-dc66e65a4acd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.010775] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50374cfc-e17d-408d-8886-5b6693c5d46a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.229021] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.229021] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 601.233070] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.719s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.235526] env[70020]: INFO nova.compute.claims [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.315113] env[70020]: DEBUG nova.network.neutron [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Successfully created port: 9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.457160] env[70020]: DEBUG oslo_vmware.rw_handles [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 601.457285] env[70020]: DEBUG oslo_vmware.rw_handles [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 601.475926] env[70020]: DEBUG nova.network.neutron [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Instance cache missing network info. 
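
Editor's note: the nova.virt.hardware block above (Flavor/Image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") is a search over sockets*cores*threads factorizations of the vCPU count under the per-dimension limits. A minimal sketch of that enumeration, under the assumption of plain factorization only; the helper name is made up and the real code also weighs flavor and image preferences:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorizations that cover `vcpus`.

        Simplified illustration of what the log reports as "possible topologies".
        """
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # For the m1.nano flavor above (vcpus=1) this yields exactly one candidate,
    # 1 socket x 1 core x 1 thread, as logged.
    print(possible_topologies(1))
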
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.591045] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Downloaded image file data c9cd83bf-fd12-4173-a067-f57d38f23556 to vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk on the data store datastore2 {{(pid=70020) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 601.592731] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 601.592972] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Copying Virtual Disk [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk to [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 601.594425] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-418bb4d7-560c-476b-981e-4a49d66945c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.603658] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 601.603658] env[70020]: value = "task-3617613" [ 601.603658] env[70020]: _type = "Task" [ 601.603658] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.612430] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.744667] env[70020]: DEBUG nova.compute.utils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 601.747234] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Allocating IP information in the background. 
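
Editor's note: the CopyVirtualDisk_Task entries and the "Waiting for the task ... to complete" / "progress is 0%" pairs above are the standard oslo.vmware pattern: invoke a vSphere method through the session, get a task managed object back, and block on wait_for_task, which produces the progress polling lines. A hedged sketch of that pattern; the endpoint, credentials and datastore paths below are placeholders, and the real call also passes sourceDatacenter/destDatacenter references:

    from oslo_vmware import api

    # Placeholder vCenter and credentials; the run above uses CONF.vmware.* settings.
    session = api.VMwareAPISession(
        host='vcenter.example.com',
        server_username='user',
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
        destName='[datastore2] devstack-image-cache_base/example/image.vmdk')

    # wait_for_task polls the task object and raises if it errors; the DEBUG
    # "progress is 0%/100%" lines come from this polling loop.
    session.wait_for_task(task)
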
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 601.747234] env[70020]: DEBUG nova.network.neutron [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.796159] env[70020]: DEBUG nova.network.neutron [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Updating instance_info_cache with network_info: [{"id": "eb757fe7-6cda-466e-9979-29e56b057f1c", "address": "fa:16:3e:04:16:f6", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb757fe7-6c", "ovs_interfaceid": "eb757fe7-6cda-466e-9979-29e56b057f1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.851081] env[70020]: DEBUG nova.policy [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eef18a9e7e94e9a8304272a7195e9fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45b60595b57a44f1b620892bac738904', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 602.117531] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617613, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.256699] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 602.300936] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Releasing lock "refresh_cache-01773af2-4ce2-4d2a-b334-ab99348000a5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.300936] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Instance network_info: |[{"id": "eb757fe7-6cda-466e-9979-29e56b057f1c", "address": "fa:16:3e:04:16:f6", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb757fe7-6c", "ovs_interfaceid": "eb757fe7-6cda-466e-9979-29e56b057f1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 602.301414] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:16:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb757fe7-6cda-466e-9979-29e56b057f1c', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.309515] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Creating folder: Project (aac90c2646b14eb5abe9caa687968c63). Parent ref: group-v721521. 
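
Editor's note: the "Instance VIF info [...]" line above is derived from the network_info cache entry logged just before it; each entry carries the Neutron port id, MAC address, bridge name and the NSX logical-switch id that the vmwareapi driver turns into an OpaqueNetwork reference. A small sketch of that mapping using plain dicts rather than Nova's NetworkInfo objects:

    def vif_info_from_network_info(nw_entry):
        """Map one cached network_info entry (as logged above) to the fields the
        vmwareapi driver needs to plug the VIF; simplified, plain-dict version."""
        details = nw_entry.get('details', {})
        return {
            'iface_id': nw_entry['id'],                      # Neutron port UUID
            'mac_address': nw_entry['address'],
            'network_name': nw_entry['network']['bridge'],   # e.g. 'br-int'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'vif_model': 'vmxnet3',
        }

    entry = {
        'id': 'eb757fe7-6cda-466e-9979-29e56b057f1c',
        'address': 'fa:16:3e:04:16:f6',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': '822050c7-1845-485d-b87e-73778d21c33c'},
    }
    print(vif_info_from_network_info(entry))
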
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.309867] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3039413-cc60-47b3-b61a-58c22e8f1762 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.320952] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Created folder: Project (aac90c2646b14eb5abe9caa687968c63) in parent group-v721521. [ 602.320952] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Creating folder: Instances. Parent ref: group-v721525. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.320952] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-addbcbd3-0e0c-43a9-87b0-f7453384cb00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.333897] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Created folder: Instances in parent group-v721525. [ 602.334149] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 602.334335] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.334541] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65a1ece1-7197-4fcd-b116-59725e63c056 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.365200] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.365200] env[70020]: value = "task-3617616" [ 602.365200] env[70020]: _type = "Task" [ 602.365200] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.372295] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617616, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.407183] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6024363d-bb5b-467c-95a4-7428bf20340b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.415028] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b05b0e0-d2bd-43f8-b5c4-499ed5313535 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.454103] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23d2fd8-5f57-4397-bd4b-4bba290e38f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.460149] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed3cc6d-e546-420b-b332-ed411ee6d8ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.479219] env[70020]: DEBUG nova.compute.provider_tree [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.623394] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668786} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.623791] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Copied Virtual Disk [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk to [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 602.623863] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleting the datastore file [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 602.624212] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d181f38-4615-4522-9e5a-6469a48834c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.631690] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 602.631690] env[70020]: value = "task-3617617" [ 602.631690] env[70020]: _type = "Task" [ 602.631690] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.642296] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617617, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.694919] env[70020]: DEBUG nova.network.neutron [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Successfully created port: 154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.875794] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617616, 'name': CreateVM_Task, 'duration_secs': 0.390609} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.875794] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 602.889741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.889936] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.890348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 602.890625] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5460ea6-6648-44b2-876b-3e5852e226e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.897386] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 602.897386] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5257a434-a193-13a4-dd69-bfe3d6173b4b" [ 602.897386] env[70020]: _type = "Task" [ 602.897386] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.905791] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5257a434-a193-13a4-dd69-bfe3d6173b4b, 'name': SearchDatastore_Task} progress is 0%. 
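
Editor's note: the Acquiring/Acquired/Releasing lock lines throughout this run (refresh_cache-<uuid>, "compute_resources", the per-image datastore cache path above) come from oslo.concurrency's lockutils, which Nova uses both as a context manager and as a decorator. A minimal sketch of both forms, with lock names mirroring the ones in this log:

    from oslo_concurrency import lockutils

    # Context-manager form, as used around the per-image datastore cache path.
    with lockutils.lock('[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556'):
        # fetch or copy the cached image while no other worker touches it
        pass

    # Decorator form, as used for the resource tracker's "compute_resources" lock.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # account the instance's cpu/ram/disk against the node's inventory
        return instance
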
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.983078] env[70020]: DEBUG nova.scheduler.client.report [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 603.148188] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030707} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.148542] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 603.148723] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Moving file from [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e/c9cd83bf-fd12-4173-a067-f57d38f23556 to [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556. {{(pid=70020) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 603.149208] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-d831e69c-66d9-43a1-9ed0-13dcd890a751 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.161932] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 603.161932] env[70020]: value = "task-3617618" [ 603.161932] env[70020]: _type = "Task" [ 603.161932] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.171855] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617618, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.268464] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Start spawning the instance on the hypervisor. 
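
Editor's note: the "Inventory has not changed for provider ... based on inventory data" entry above shows the raw capacity reported to Placement; Placement treats usable capacity per resource class as (total - reserved) * allocation_ratio. A quick check against the logged numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)
    # VCPU 192.0       -> 48 cores oversubscribed 4x
    # MEMORY_MB 196078.0
    # DISK_GB 400.0
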
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 603.304390] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 603.304561] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.305334] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 603.305334] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.305334] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 603.305334] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 603.305514] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 603.305547] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 603.305768] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 603.305947] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 603.307640] env[70020]: DEBUG nova.virt.hardware [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 603.307640] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d50087e-6790-4a31-8e6f-28b6c6714984 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.319426] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a220ba64-c97c-4419-a2e3-eef67577e1fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.417787] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.418057] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.419267] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.488927] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.488927] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] 
[instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 603.493424] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.829s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.493605] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.493754] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 603.494049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.808s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.495859] env[70020]: INFO nova.compute.claims [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.499869] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9d5c66-7a85-4f6f-864c-e38a778b348c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.517240] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875a5616-3fbc-4299-b15a-ac6f162f2b42 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.542341] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14807c8e-3898-4fca-8c36-aa350596ebe1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.553040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c8eeac-944c-46a1-b633-5c371aa0bcca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.584653] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180926MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 603.584835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.676312] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617618, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.044382} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.677053] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] File moved {{(pid=70020) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 603.677573] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Cleaning up location [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 603.677956] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleting the datastore file [datastore2] vmware_temp/de86a681-3e21-4eca-87c8-3488087d180e {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 603.678435] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1ea5185-f58e-40ad-aefa-d2e4b8715b11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.688844] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 603.688844] env[70020]: value = "task-3617619" [ 603.688844] env[70020]: _type = "Task" [ 603.688844] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.703133] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.001629] env[70020]: DEBUG nova.compute.utils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 604.003540] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 604.006663] env[70020]: DEBUG nova.network.neutron [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 604.127826] env[70020]: DEBUG nova.policy [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1cf6392333e4658aaaa9cdadc9cad61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3454ca4e376d4017891025c3a36cebf8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 604.198914] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032593} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.199190] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 604.200385] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdc891ec-9cf1-46b0-a412-51ab8584358a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.209076] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 604.209076] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528b490a-2869-3d56-ce9c-46c7a6068b7b" [ 604.209076] env[70020]: _type = "Task" [ 604.209076] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.220956] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528b490a-2869-3d56-ce9c-46c7a6068b7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.507425] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 604.635115] env[70020]: DEBUG nova.compute.manager [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Received event network-changed-eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.635115] env[70020]: DEBUG nova.compute.manager [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Refreshing instance network info cache due to event network-changed-eb757fe7-6cda-466e-9979-29e56b057f1c. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 604.635115] env[70020]: DEBUG oslo_concurrency.lockutils [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] Acquiring lock "refresh_cache-01773af2-4ce2-4d2a-b334-ab99348000a5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.635115] env[70020]: DEBUG oslo_concurrency.lockutils [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] Acquired lock "refresh_cache-01773af2-4ce2-4d2a-b334-ab99348000a5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.635115] env[70020]: DEBUG nova.network.neutron [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Refreshing network info cache for port eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 604.659243] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829f49dc-75e9-4746-8433-012ce3156f33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.668013] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3604fb1e-2006-452a-9f01-51045b182196 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.699207] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b70236-4d12-4f73-9508-43272f7b8e5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.707695] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e170a84-7522-4099-9e83-6f3547a15afd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.726174] env[70020]: DEBUG nova.compute.provider_tree [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.740323] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': 
session[528c1535-3daa-a7b0-823d-982a96a72224]528b490a-2869-3d56-ce9c-46c7a6068b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.015206} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.740941] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.741476] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0cc49db6-1574-4e51-8692-b79ee14bc25d/0cc49db6-1574-4e51-8692-b79ee14bc25d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.741638] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.741765] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 604.741987] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd852a22-862f-475b-b4c8-351dc067ab20 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.745950] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22b7e8fb-b53d-4731-bf9d-d8e57a8249a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.753267] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 604.753267] env[70020]: value = "task-3617620" [ 604.753267] env[70020]: _type = "Task" [ 604.753267] env[70020]: } to complete. 
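
Editor's note: the "Received event network-changed-eb757fe7-..." entries a little further up come from Neutron notifying Nova through the os-server-external-events API when a port changes; the handler then refreshes the instance's network info cache, as logged. A hedged sketch of what that REST call looks like; the endpoint and token are placeholders:

    import requests

    # Hypothetical nova-api endpoint and token; mirrors the payload Neutron's
    # nova notifier sends for a port update on the instance above.
    resp = requests.post(
        'http://nova-api.example.com/v2.1/os-server-external-events',
        headers={'X-Auth-Token': 'TOKEN', 'Content-Type': 'application/json'},
        json={'events': [{
            'name': 'network-changed',
            'server_uuid': '01773af2-4ce2-4d2a-b334-ab99348000a5',
            'tag': 'eb757fe7-6cda-466e-9979-29e56b057f1c',
        }]},
    )
    print(resp.status_code)
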
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.759952] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 604.759952] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 604.759952] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2798d95b-1a4b-47c7-8e2f-50e1038f092d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.766677] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.769138] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 604.769138] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5291bc17-f839-5431-e22d-fa0458da4f24" [ 604.769138] env[70020]: _type = "Task" [ 604.769138] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.781054] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5291bc17-f839-5431-e22d-fa0458da4f24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.021689] env[70020]: DEBUG nova.network.neutron [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Successfully created port: c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.233753] env[70020]: DEBUG nova.scheduler.client.report [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.265340] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617620, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.284066] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5291bc17-f839-5431-e22d-fa0458da4f24, 'name': SearchDatastore_Task, 'duration_secs': 0.015497} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.284066] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf0c8207-ba9e-4906-8eca-818f8a1fe00a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.287647] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 605.287647] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524a7855-fc6b-f494-d5af-aaaea94baede" [ 605.287647] env[70020]: _type = "Task" [ 605.287647] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.297074] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524a7855-fc6b-f494-d5af-aaaea94baede, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.524914] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 605.564404] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 605.564670] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.564838] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 605.565033] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.565183] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 605.565328] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 605.565537] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 605.565701] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 605.565865] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 605.566039] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 605.566258] env[70020]: DEBUG nova.virt.hardware [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 605.567998] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c377345a-eb26-429a-9662-695f3c1549be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.578769] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f56e0ff-c3d4-4215-8fbd-14b5902adf12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.739101] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.739677] env[70020]: DEBUG nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 605.743416] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.784s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.747823] env[70020]: INFO nova.compute.claims [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.766915] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.948419} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.768076] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0cc49db6-1574-4e51-8692-b79ee14bc25d/0cc49db6-1574-4e51-8692-b79ee14bc25d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.769100] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 605.769100] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18512246-b911-415b-a6ac-c59f9c0df874 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.778828] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 605.778828] env[70020]: value = "task-3617621" [ 605.778828] env[70020]: _type = "Task" [ 605.778828] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.788421] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617621, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.798894] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524a7855-fc6b-f494-d5af-aaaea94baede, 'name': SearchDatastore_Task, 'duration_secs': 0.027341} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.799040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.799240] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 01773af2-4ce2-4d2a-b334-ab99348000a5/01773af2-4ce2-4d2a-b334-ab99348000a5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 605.799755] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6132f39c-7605-4b75-8934-dc7a12bb411c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.806418] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 605.806418] env[70020]: value = "task-3617622" [ 605.806418] env[70020]: _type = "Task" [ 605.806418] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.814766] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617622, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.203422] env[70020]: DEBUG nova.network.neutron [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Successfully updated port: 154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 606.252486] env[70020]: DEBUG nova.compute.utils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 606.257386] env[70020]: DEBUG nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 606.258427] env[70020]: DEBUG nova.network.neutron [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.301726] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067331} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.302082] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 606.303894] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf4bf32-90ca-4a5a-975d-12e8b758a4bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.338085] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 0cc49db6-1574-4e51-8692-b79ee14bc25d/0cc49db6-1574-4e51-8692-b79ee14bc25d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 606.341668] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f841dee7-89b9-47fb-95a6-29ea45784bf3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.359352] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617622, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.365331] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 606.365331] env[70020]: value = "task-3617623" [ 606.365331] env[70020]: _type = "Task" [ 606.365331] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.374843] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617623, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.404321] env[70020]: DEBUG nova.network.neutron [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Successfully updated port: 9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 606.445206] env[70020]: DEBUG nova.network.neutron [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Updated VIF entry in instance network info cache for port eb757fe7-6cda-466e-9979-29e56b057f1c. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 606.445585] env[70020]: DEBUG nova.network.neutron [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Updating instance_info_cache with network_info: [{"id": "eb757fe7-6cda-466e-9979-29e56b057f1c", "address": "fa:16:3e:04:16:f6", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb757fe7-6c", "ovs_interfaceid": "eb757fe7-6cda-466e-9979-29e56b057f1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.453542] env[70020]: DEBUG nova.policy [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a50b41a53354ffc9b89ea3d769590d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f81ac3e65f9042f4bcf818cd216a32eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 606.709042] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "refresh_cache-516341a3-2230-4340-a1e0-ff97bb7a608d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.709042] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired lock "refresh_cache-516341a3-2230-4340-a1e0-ff97bb7a608d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.709042] env[70020]: DEBUG nova.network.neutron [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.760944] env[70020]: DEBUG nova.compute.manager [None 
req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 606.824763] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53828} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.824763] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 01773af2-4ce2-4d2a-b334-ab99348000a5/01773af2-4ce2-4d2a-b334-ab99348000a5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 606.824763] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 606.824763] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eef02563-575c-46d2-8b97-3c4b9f0073e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.830987] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 606.830987] env[70020]: value = "task-3617624" [ 606.830987] env[70020]: _type = "Task" [ 606.830987] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.846660] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.881252] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617623, 'name': ReconfigVM_Task, 'duration_secs': 0.280333} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.881252] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 0cc49db6-1574-4e51-8692-b79ee14bc25d/0cc49db6-1574-4e51-8692-b79ee14bc25d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 606.884198] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fa3a2cd-8040-4e47-a04d-b6c8e7ffb4b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.893072] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 606.893072] env[70020]: value = "task-3617625" [ 606.893072] env[70020]: _type = "Task" [ 606.893072] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.907468] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617625, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.910329] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "refresh_cache-81d5a1b4-1398-4fca-b500-aa2a3dc41494" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.910329] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "refresh_cache-81d5a1b4-1398-4fca-b500-aa2a3dc41494" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.910329] env[70020]: DEBUG nova.network.neutron [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.949109] env[70020]: DEBUG oslo_concurrency.lockutils [req-8370115c-19a4-443a-bb2b-befc5c644b6b req-964d3881-e328-4d82-8fa5-a9ed83f63968 service nova] Releasing lock "refresh_cache-01773af2-4ce2-4d2a-b334-ab99348000a5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.995540] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb745d9-9657-45ac-95a3-d11c74b8ae07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.008341] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-19e850da-6dcb-4318-93d1-ca9d59a73acf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.046754] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e8634f-0ad8-42e1-96c8-9ef8565d1d5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.057544] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed986e18-953d-475a-933c-dbea966e81ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.079992] env[70020]: DEBUG nova.compute.provider_tree [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.177897] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "bb4e4986-af2a-4832-9ec7-777bca863dce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.178471] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.345835] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.233797} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.346115] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 607.346896] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e615fa5a-7c2e-4e8e-a60e-d950b07ca011 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.352036] env[70020]: DEBUG nova.network.neutron [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.374562] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 01773af2-4ce2-4d2a-b334-ab99348000a5/01773af2-4ce2-4d2a-b334-ab99348000a5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 607.375369] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59d25c56-ab00-4ba4-9db2-5c41a4531e6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.401130] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 607.401130] env[70020]: value = "task-3617626" [ 607.401130] env[70020]: _type = "Task" [ 607.401130] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.410706] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617625, 'name': Rename_Task, 'duration_secs': 0.222391} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.412826] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 607.412826] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c977a80b-220b-4f2c-b5ac-2ad1ef13b8e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.419699] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617626, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.426079] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 607.426079] env[70020]: value = "task-3617627" [ 607.426079] env[70020]: _type = "Task" [ 607.426079] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.435135] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617627, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.585292] env[70020]: DEBUG nova.scheduler.client.report [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.681524] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 607.748630] env[70020]: DEBUG nova.network.neutron [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.789631] env[70020]: DEBUG nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 607.828857] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 607.829436] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.829615] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 607.829782] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.830056] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 607.830312] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 607.832161] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 607.832406] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 607.832911] env[70020]: DEBUG 
nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 607.832911] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 607.832911] env[70020]: DEBUG nova.virt.hardware [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 607.833838] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3a37c8-9a09-49cd-a2be-924049ad7bb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.842112] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5ab685-4708-44a2-904c-e8b844245a5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.911535] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617626, 'name': ReconfigVM_Task, 'duration_secs': 0.338566} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.911924] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 01773af2-4ce2-4d2a-b334-ab99348000a5/01773af2-4ce2-4d2a-b334-ab99348000a5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.912609] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89f92608-73ed-42ae-a337-ad6e15f2fc65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.918627] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 607.918627] env[70020]: value = "task-3617628" [ 607.918627] env[70020]: _type = "Task" [ 607.918627] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.926557] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617628, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.933930] env[70020]: DEBUG oslo_vmware.api [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617627, 'name': PowerOnVM_Task, 'duration_secs': 0.455251} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.934063] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 607.934425] env[70020]: INFO nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Took 9.23 seconds to spawn the instance on the hypervisor. [ 607.934667] env[70020]: DEBUG nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 607.935656] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cee4eb5-b204-4f10-afc2-8457be02e06f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.070185] env[70020]: DEBUG nova.network.neutron [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Successfully created port: 27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.091731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.095210] env[70020]: DEBUG nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 608.099083] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.512s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.196235] env[70020]: DEBUG nova.network.neutron [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Updating instance_info_cache with network_info: [{"id": "154faccc-5d99-43cc-a66e-9c06bcc5fdf2", "address": "fa:16:3e:99:15:a0", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap154faccc-5d", "ovs_interfaceid": "154faccc-5d99-43cc-a66e-9c06bcc5fdf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.208848] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.430661] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617628, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.434546] env[70020]: DEBUG nova.network.neutron [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Updating instance_info_cache with network_info: [{"id": "9ead2454-433c-40e4-962b-8e43443376bb", "address": "fa:16:3e:53:8e:9e", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ead2454-43", "ovs_interfaceid": "9ead2454-433c-40e4-962b-8e43443376bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.458870] env[70020]: INFO nova.compute.manager [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Took 15.90 seconds to build instance. [ 608.601561] env[70020]: DEBUG nova.compute.utils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 608.606992] env[70020]: DEBUG nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Not allocating networking since 'none' was specified. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 608.702471] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Releasing lock "refresh_cache-516341a3-2230-4340-a1e0-ff97bb7a608d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.702800] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Instance network_info: |[{"id": "154faccc-5d99-43cc-a66e-9c06bcc5fdf2", "address": "fa:16:3e:99:15:a0", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap154faccc-5d", "ovs_interfaceid": "154faccc-5d99-43cc-a66e-9c06bcc5fdf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 608.703250] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:15:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '154faccc-5d99-43cc-a66e-9c06bcc5fdf2', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.714078] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Creating folder: Project (45b60595b57a44f1b620892bac738904). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.714415] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0ccd104-e9e9-4057-8837-c052e55bf007 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.727864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "372e5569-8824-4841-b3d6-4b07423c7b3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.728016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.732842] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Created folder: Project (45b60595b57a44f1b620892bac738904) in parent group-v721521. [ 608.732842] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Creating folder: Instances. Parent ref: group-v721528. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.732842] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb903daf-d247-40da-9568-a5b188eed2b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.741610] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Created folder: Instances in parent group-v721528. [ 608.741844] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.742040] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.742245] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e7a035c-1458-4c02-b0cf-ef52f0c0800d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.764630] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.764630] env[70020]: value = "task-3617631" [ 608.764630] env[70020]: _type = "Task" [ 608.764630] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.773617] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617631, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.885769] env[70020]: DEBUG nova.network.neutron [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Successfully updated port: c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 608.936220] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617628, 'name': Rename_Task, 'duration_secs': 0.893963} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.936962] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "refresh_cache-81d5a1b4-1398-4fca-b500-aa2a3dc41494" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.939359] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Instance network_info: |[{"id": "9ead2454-433c-40e4-962b-8e43443376bb", "address": "fa:16:3e:53:8e:9e", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ead2454-43", "ovs_interfaceid": "9ead2454-433c-40e4-962b-8e43443376bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 608.939833] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.944021] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:8e:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ead2454-433c-40e4-962b-8e43443376bb', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.956777] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating folder: Project (52cd193f3ca7403a986d72f072590f4f). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.958893] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed353a24-b108-4f2e-bce6-1659193aedfb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.959802] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aefb0cb9-92db-4a72-b527-462a0f7cfd36 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.962074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-812ffcf4-0601-4f18-990f-05e1e7836ad7 tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "0cc49db6-1574-4e51-8692-b79ee14bc25d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.408s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.968617] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 608.968617] env[70020]: value = "task-3617633" [ 608.968617] env[70020]: _type = "Task" [ 608.968617] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.974304] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created folder: Project (52cd193f3ca7403a986d72f072590f4f) in parent group-v721521. [ 608.974481] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating folder: Instances. Parent ref: group-v721531. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.975100] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-252c4e3e-daea-4a6b-8c91-8e54f293e704 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.979942] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.987667] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created folder: Instances in parent group-v721531. [ 608.987667] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.987667] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.987667] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb844764-3a1a-41bf-8305-daeb43d2da25 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.008988] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 609.008988] env[70020]: value = "task-3617635" [ 609.008988] env[70020]: _type = "Task" [ 609.008988] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.022638] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617635, 'name': CreateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.110406] env[70020]: DEBUG nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.126093] env[70020]: DEBUG nova.compute.manager [req-83cca633-f007-4fb1-b35f-ecac65fdb5c2 req-9f4347d8-9762-424c-9d7e-0dabd8b9d92e service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Received event network-vif-plugged-9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 609.126093] env[70020]: DEBUG oslo_concurrency.lockutils [req-83cca633-f007-4fb1-b35f-ecac65fdb5c2 req-9f4347d8-9762-424c-9d7e-0dabd8b9d92e service nova] Acquiring lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.126093] env[70020]: DEBUG oslo_concurrency.lockutils [req-83cca633-f007-4fb1-b35f-ecac65fdb5c2 req-9f4347d8-9762-424c-9d7e-0dabd8b9d92e service nova] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.126093] env[70020]: DEBUG oslo_concurrency.lockutils [req-83cca633-f007-4fb1-b35f-ecac65fdb5c2 req-9f4347d8-9762-424c-9d7e-0dabd8b9d92e service nova] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.126093] env[70020]: DEBUG nova.compute.manager [req-83cca633-f007-4fb1-b35f-ecac65fdb5c2 req-9f4347d8-9762-424c-9d7e-0dabd8b9d92e service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] No waiting events found dispatching network-vif-plugged-9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 609.126519] env[70020]: WARNING nova.compute.manager [req-83cca633-f007-4fb1-b35f-ecac65fdb5c2 
req-9f4347d8-9762-424c-9d7e-0dabd8b9d92e service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Received unexpected event network-vif-plugged-9ead2454-433c-40e4-962b-8e43443376bb for instance with vm_state building and task_state spawning. [ 609.158606] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 01773af2-4ce2-4d2a-b334-ab99348000a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.158842] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 81d5a1b4-1398-4fca-b500-aa2a3dc41494 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.159257] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 0cc49db6-1574-4e51-8692-b79ee14bc25d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.159257] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 516341a3-2230-4340-a1e0-ff97bb7a608d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.159257] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 6a114dce-7ed3-46e1-9d50-c3dd6efd340c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.159393] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.159644] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b0b825d4-534d-4d54-a0c4-b9e507726c47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 609.231860] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 609.274664] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617631, 'name': CreateVM_Task, 'duration_secs': 0.343482} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.274935] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.276067] env[70020]: DEBUG oslo_vmware.service [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e224be-a5d0-45cc-8904-67bd2d63135d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.286269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.286269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.286269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.286269] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b50a1af-1078-49cb-abbf-2c8829ba68af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.294098] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 609.294098] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b93ac5-64f0-e179-0efb-3afb897d9c52" [ 609.294098] env[70020]: _type = "Task" [ 609.294098] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.302890] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b93ac5-64f0-e179-0efb-3afb897d9c52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.389660] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.389817] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquired lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.389972] env[70020]: DEBUG nova.network.neutron [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.479655] env[70020]: DEBUG oslo_vmware.api [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617633, 'name': PowerOnVM_Task, 'duration_secs': 0.488412} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.481602] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 609.481602] env[70020]: INFO nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Took 13.00 seconds to spawn the instance on the hypervisor. [ 609.481602] env[70020]: DEBUG nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.481602] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a6c289-20e7-4451-89ae-abf86bb9831a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.525471] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617635, 'name': CreateVM_Task, 'duration_secs': 0.370594} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.525680] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.528402] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.528714] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.528905] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.529552] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea2b4f43-2862-4b0f-baec-cb3a15ddae06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.537527] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 609.537527] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bcf42f-9d7a-a838-73a3-3e7ca0c1ff98" [ 609.537527] env[70020]: _type = "Task" [ 609.537527] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.547221] env[70020]: DEBUG nova.compute.manager [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Received event network-vif-plugged-154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 609.547221] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] Acquiring lock "516341a3-2230-4340-a1e0-ff97bb7a608d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.547221] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.547221] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.547221] env[70020]: DEBUG nova.compute.manager [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] No waiting events found dispatching network-vif-plugged-154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 609.547432] env[70020]: WARNING nova.compute.manager [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Received unexpected event network-vif-plugged-154faccc-5d99-43cc-a66e-9c06bcc5fdf2 for instance with vm_state building and task_state spawning. [ 609.547432] env[70020]: DEBUG nova.compute.manager [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Received event network-changed-154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 609.548079] env[70020]: DEBUG nova.compute.manager [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Refreshing instance network info cache due to event network-changed-154faccc-5d99-43cc-a66e-9c06bcc5fdf2. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 609.548079] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] Acquiring lock "refresh_cache-516341a3-2230-4340-a1e0-ff97bb7a608d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.548079] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] Acquired lock "refresh_cache-516341a3-2230-4340-a1e0-ff97bb7a608d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.548079] env[70020]: DEBUG nova.network.neutron [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Refreshing network info cache for port 154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.555372] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bcf42f-9d7a-a838-73a3-3e7ca0c1ff98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.664041] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance bb4e4986-af2a-4832-9ec7-777bca863dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 609.770781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.808562] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.808749] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.808982] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.809141] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.809318] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.809566] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6859cbe5-617a-441a-a7f4-cc50521187da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.821609] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.821609] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.824018] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a032e236-375a-4b73-83b8-f2412b59e611 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.830325] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd85b560-12c7-4a59-b107-3f424dcd9f62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.836668] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 609.836668] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fd632d-93f1-6a2d-34ce-380b9c481574" [ 609.836668] env[70020]: _type = "Task" [ 609.836668] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.849108] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fd632d-93f1-6a2d-34ce-380b9c481574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.971163] env[70020]: DEBUG nova.network.neutron [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.007636] env[70020]: INFO nova.compute.manager [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Took 17.81 seconds to build instance. [ 610.059306] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bcf42f-9d7a-a838-73a3-3e7ca0c1ff98, 'name': SearchDatastore_Task, 'duration_secs': 0.011317} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.068049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.068303] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.068530] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.068666] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.068832] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.071037] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1a7791b-73e0-4d98-a512-2cd2f98294ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.094221] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.094221] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.094507] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14fb12a5-201b-4e3b-a0bb-47a5109c18e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.104774] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 610.104774] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5289148e-6a26-cc7b-bdf9-2bdfaf08a51c" [ 610.104774] env[70020]: _type = "Task" [ 610.104774] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.115982] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5289148e-6a26-cc7b-bdf9-2bdfaf08a51c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.120574] env[70020]: DEBUG nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 610.164578] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.165653] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.165653] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.165653] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Flavor 
pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.165653] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.165653] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.165903] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.165903] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 610.165991] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.166109] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.166410] env[70020]: DEBUG nova.virt.hardware [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.167138] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 372e5569-8824-4841-b3d6-4b07423c7b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 610.167374] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 610.167485] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 610.171123] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58835ca3-39c8-440c-9bde-cc637a13eeee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.180613] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212cf995-40d7-4dbc-8cd5-cef2a9fc3a84 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.201118] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.208103] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Creating folder: Project (f9640bb6e09f42e79c06abf5496425bc). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.208612] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3007bd1c-6b18-42af-b9c8-411558600d33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.235149] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Created folder: Project (f9640bb6e09f42e79c06abf5496425bc) in parent group-v721521. [ 610.235149] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Creating folder: Instances. Parent ref: group-v721534. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.235149] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5973e40-ebdc-427a-a3bd-4c0b0d2a5662 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.249109] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Created folder: Instances in parent group-v721534. 
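The CreateVM_Task, PowerOnVM_Task and SearchDatastore_Task entries above and below all follow the same shape: wait_for_task submits a vCenter task, _poll_task re-reads its progress ("progress is N%") until it finishes, and the final entry reports duration_secs once the task "completed successfully". The snippet below is a minimal, self-contained Python sketch of that polling pattern for illustration only; it is not the oslo.vmware implementation, and the fetch_task_info callable, its return format, and the poll interval are assumptions introduced here.

    import time

    def wait_for_task(task_id, fetch_task_info, poll_interval=0.5):
        """Poll a task until it succeeds or fails, mirroring the
        'progress is N%' / 'completed successfully' log messages above.
        fetch_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 40}."""
        started = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                duration = time.monotonic() - started
                print(f"Task {task_id} completed successfully in {duration:.3f}s")
                return info
            if info['state'] == 'error':
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

    # Example run against a fake task source that finishes on the third poll;
    # the task id is reused from the log purely as a label.
    if __name__ == "__main__":
        states = iter([
            {'state': 'running', 'progress': 0},
            {'state': 'running', 'progress': 50},
            {'state': 'success', 'progress': 100},
        ])
        wait_for_task("task-3617638", lambda _tid: next(states), poll_interval=0.1)

The real driver layers retries, session re-authentication and error translation on top of this loop; the sketch only shows the poll-and-report behaviour that the surrounding entries record.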
[ 610.249109] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 610.249109] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.249109] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61371fd7-7beb-40bd-b0c5-99d73d73d5b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.279108] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.279108] env[70020]: value = "task-3617638" [ 610.279108] env[70020]: _type = "Task" [ 610.279108] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.294579] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617638, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.354996] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 610.355311] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Creating directory with path [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.355551] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c80f8b18-0fc1-4b86-9d06-41e2ee09442b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.384809] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Created directory with path [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.385362] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Fetch image to [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 610.385893] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 
tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Downloading image file data c9cd83bf-fd12-4173-a067-f57d38f23556 to [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk on the data store datastore1 {{(pid=70020) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 610.388649] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5248afb9-49aa-480f-bb2c-2122226862ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.397895] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc79bf4e-64fb-4630-9e4a-302074daf533 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.418397] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7604fc7e-bf3e-48bd-bb28-4be9747080b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.458645] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2406d0-914f-4175-987a-45ab3b129ac4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.465684] env[70020]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8d911af0-d6f8-4a63-9958-02ec1cb7a8c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.476129] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eefeb1b-a022-49be-b8b1-3e483bd36860 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.484133] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a55a660-5600-4099-a35c-e1c4cd6350d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.527075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5571eb28-687c-46fb-954a-f6a91c63faea tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.344s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.529939] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a681def-05b6-4a0f-a26d-17b8eb60842c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.535918] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Downloading image file data c9cd83bf-fd12-4173-a067-f57d38f23556 to the data store datastore1 {{(pid=70020) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 610.536839] env[70020]: DEBUG 
nova.network.neutron [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Updating instance_info_cache with network_info: [{"id": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "address": "fa:16:3e:20:f4:6e", "network": {"id": "e0413f92-dcf6-413d-b61f-14b064f9a1d8", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-493233975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3454ca4e376d4017891025c3a36cebf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c80204-b8", "ovs_interfaceid": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.544047] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d9eb89-1579-4823-be96-bead719f34be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.563567] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.620148] env[70020]: DEBUG oslo_vmware.rw_handles [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 610.623459] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5289148e-6a26-cc7b-bdf9-2bdfaf08a51c, 'name': SearchDatastore_Task, 'duration_secs': 0.036098} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.677310] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-790a1836-2596-4173-88a5-174a31dd370b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.685950] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 610.685950] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c2b87e-587d-8a9c-23ca-f3a0ecf7c201" [ 610.685950] env[70020]: _type = "Task" [ 610.685950] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.694919] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2b87e-587d-8a9c-23ca-f3a0ecf7c201, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.789841] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617638, 'name': CreateVM_Task, 'duration_secs': 0.348522} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.790031] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 610.790585] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.790731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.791101] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 610.791736] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba3ccfc-067f-4ecf-b44d-0f59ad66638e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.796617] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 610.796617] env[70020]: value = 
"session[528c1535-3daa-a7b0-823d-982a96a72224]527da1be-ec3a-ee42-a90d-16b6eeb59054" [ 610.796617] env[70020]: _type = "Task" [ 610.796617] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.814218] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527da1be-ec3a-ee42-a90d-16b6eeb59054, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.041528] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Releasing lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.041528] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Instance network_info: |[{"id": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "address": "fa:16:3e:20:f4:6e", "network": {"id": "e0413f92-dcf6-413d-b61f-14b064f9a1d8", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-493233975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3454ca4e376d4017891025c3a36cebf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c80204-b8", "ovs_interfaceid": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 611.041654] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:f4:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '778b9a40-d603-4765-ac88-bd6d42c457a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7c80204-b8b4-46c6-8d93-38d4879119d3', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.050639] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Creating folder: Project (3454ca4e376d4017891025c3a36cebf8). 
Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 611.051731] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ebd5eac-4b87-47e9-b986-d8169f09f477 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.063435] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Created folder: Project (3454ca4e376d4017891025c3a36cebf8) in parent group-v721521. [ 611.064772] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Creating folder: Instances. Parent ref: group-v721537. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 611.064772] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-477e68c9-60db-48ea-a5c8-1f9c0f433500 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.070520] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 611.075993] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Created folder: Instances in parent group-v721537. [ 611.075993] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 611.076785] env[70020]: DEBUG nova.network.neutron [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Updated VIF entry in instance network info cache for port 154faccc-5d99-43cc-a66e-9c06bcc5fdf2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.077172] env[70020]: DEBUG nova.network.neutron [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Updating instance_info_cache with network_info: [{"id": "154faccc-5d99-43cc-a66e-9c06bcc5fdf2", "address": "fa:16:3e:99:15:a0", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap154faccc-5d", "ovs_interfaceid": "154faccc-5d99-43cc-a66e-9c06bcc5fdf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.078351] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 611.079063] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82acfd77-8199-432c-8c0d-1e1d7452ebdd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.098602] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c24ada2-d7b0-4659-be52-7623a52c5e63 req-faedc22e-3a3b-43a3-948b-c889f78ffb26 service nova] Releasing lock "refresh_cache-516341a3-2230-4340-a1e0-ff97bb7a608d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.104479] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.104479] env[70020]: value = "task-3617641" [ 611.104479] env[70020]: _type = "Task" [ 611.104479] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.114786] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617641, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.197594] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2b87e-587d-8a9c-23ca-f3a0ecf7c201, 'name': SearchDatastore_Task, 'duration_secs': 0.012738} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.198955] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.199330] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 81d5a1b4-1398-4fca-b500-aa2a3dc41494/81d5a1b4-1398-4fca-b500-aa2a3dc41494.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.199605] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9924e265-0ec5-4e85-8de2-21e988fd77db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.211649] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 611.211649] env[70020]: value = "task-3617642" [ 611.211649] env[70020]: _type = "Task" [ 611.211649] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.223458] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.272900] env[70020]: DEBUG oslo_vmware.rw_handles [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 611.273413] env[70020]: DEBUG oslo_vmware.rw_handles [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 611.311335] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.311459] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.311660] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.416962] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Downloaded image file data c9cd83bf-fd12-4173-a067-f57d38f23556 to vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk on the data store datastore1 {{(pid=70020) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 611.421959] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 611.421959] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Copying Virtual Disk [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk to [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.421959] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84b65e68-7e6f-401a-beb9-a5edbef2cba7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.432038] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 611.432038] env[70020]: value = "task-3617643" [ 611.432038] env[70020]: _type = "Task" [ 611.432038] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.442484] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617643, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.580411] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 611.582429] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.485s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.582848] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.374s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.591067] env[70020]: INFO nova.compute.claims [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.597214] env[70020]: DEBUG nova.network.neutron [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Successfully updated port: 27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.619930] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617641, 'name': CreateVM_Task, 'duration_secs': 0.382421} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.619930] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 611.622179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.622179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.622179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 611.622401] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70a6e587-8ea7-4f37-bb03-9263071f7847 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.629722] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 611.629722] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5216a75c-d34b-18fb-fec5-c4ebcb2a347d" [ 611.629722] env[70020]: _type = "Task" [ 611.629722] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.639526] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5216a75c-d34b-18fb-fec5-c4ebcb2a347d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.727310] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617642, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.948102] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617643, 'name': CopyVirtualDisk_Task} progress is 27%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.101231] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.101963] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.102360] env[70020]: DEBUG nova.network.neutron [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.144900] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.144900] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 612.144900] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.224770] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585629} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.224957] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 81d5a1b4-1398-4fca-b500-aa2a3dc41494/81d5a1b4-1398-4fca-b500-aa2a3dc41494.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.225194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.225471] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a664078-49c7-463e-b6c5-fb8a41e84755 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.234767] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 612.234767] env[70020]: value = "task-3617644" [ 612.234767] env[70020]: _type = "Task" [ 612.234767] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.256540] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617644, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.445924] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617643, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.654995] env[70020]: DEBUG nova.network.neutron [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.703321] env[70020]: DEBUG nova.compute.manager [None req-cfb47a6a-fe4a-4181-8720-1a80e70e5928 tempest-ServerDiagnosticsV248Test-998026657 tempest-ServerDiagnosticsV248Test-998026657-project-admin] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.704960] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97075d09-49f5-4bf7-ae9f-3e054b71d785 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.714483] env[70020]: INFO nova.compute.manager [None req-cfb47a6a-fe4a-4181-8720-1a80e70e5928 tempest-ServerDiagnosticsV248Test-998026657 tempest-ServerDiagnosticsV248Test-998026657-project-admin] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Retrieving diagnostics [ 612.715134] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3206c658-1862-4d62-bef3-4ef9a6e66e68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.781226] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068619} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.781226] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.781678] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d796a0-71e9-4763-aee0-905b33a76cc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.809492] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 81d5a1b4-1398-4fca-b500-aa2a3dc41494/81d5a1b4-1398-4fca-b500-aa2a3dc41494.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.812974] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4183b63e-c7d4-4d8a-af9a-22e471124a94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.839182] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 612.839182] env[70020]: value = "task-3617645" [ 612.839182] env[70020]: _type = "Task" [ 612.839182] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.847755] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1b272c-a23f-4ff7-a2b9-0672e5901810 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.859496] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b3c097-8e9b-436d-bc8b-1b9778cfa7db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.863323] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617645, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.891936] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a051cd-2df4-44c3-8d19-7ca82adf3bcd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.902507] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504bd1b8-4b04-46ed-940c-33ae34a6f42c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.918788] env[70020]: DEBUG nova.compute.provider_tree [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.944734] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617643, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.344322} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.945289] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Copied Virtual Disk [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk to [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.945289] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleting the datastore file [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556/tmp-sparse.vmdk {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 612.945461] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c70c8008-dd06-4b20-ac2d-8e7c833b1e3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.951725] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 612.951725] env[70020]: value = "task-3617646" [ 612.951725] env[70020]: _type = "Task" [ 612.951725] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.960518] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.065656] env[70020]: DEBUG nova.network.neutron [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updating instance_info_cache with network_info: [{"id": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "address": "fa:16:3e:d7:26:c6", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c6992d-5e", "ovs_interfaceid": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.131457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.131681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.350231] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617645, 'name': ReconfigVM_Task, 'duration_secs': 0.289367} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.350611] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 81d5a1b4-1398-4fca-b500-aa2a3dc41494/81d5a1b4-1398-4fca-b500-aa2a3dc41494.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.351262] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a024c7e-d7b4-479d-b30e-0d9d8c3f729e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.360200] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 613.360200] env[70020]: value = "task-3617647" [ 613.360200] env[70020]: _type = "Task" [ 613.360200] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.369295] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617647, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.422552] env[70020]: DEBUG nova.scheduler.client.report [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 613.462169] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046442} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.462374] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 613.462570] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Moving file from [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498/c9cd83bf-fd12-4173-a067-f57d38f23556 to [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556. {{(pid=70020) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 613.462829] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c34dee61-3aaa-4a15-967b-5d3a137dfd85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.470791] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 613.470791] env[70020]: value = "task-3617648" [ 613.470791] env[70020]: _type = "Task" [ 613.470791] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.482952] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617648, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.483706] env[70020]: DEBUG nova.compute.manager [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Received event network-changed-9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 613.483899] env[70020]: DEBUG nova.compute.manager [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Refreshing instance network info cache due to event network-changed-9ead2454-433c-40e4-962b-8e43443376bb. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 613.484127] env[70020]: DEBUG oslo_concurrency.lockutils [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] Acquiring lock "refresh_cache-81d5a1b4-1398-4fca-b500-aa2a3dc41494" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.484259] env[70020]: DEBUG oslo_concurrency.lockutils [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] Acquired lock "refresh_cache-81d5a1b4-1398-4fca-b500-aa2a3dc41494" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.484408] env[70020]: DEBUG nova.network.neutron [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Refreshing network info cache for port 9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 613.568739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Releasing lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.572181] env[70020]: DEBUG nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Instance network_info: |[{"id": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "address": "fa:16:3e:d7:26:c6", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c6992d-5e", "ovs_interfaceid": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 613.572588] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:26:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'27c6992d-5e25-418c-83e7-a49ce44dee0e', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 613.587019] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Creating folder: Project (f81ac3e65f9042f4bcf818cd216a32eb). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.587019] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a1afa8f-883d-4b75-9de1-86e179fb4981 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.597240] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Created folder: Project (f81ac3e65f9042f4bcf818cd216a32eb) in parent group-v721521. [ 613.597548] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Creating folder: Instances. Parent ref: group-v721540. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.597772] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8ef8c81-95b2-4eb5-93a8-24c6c5c4bbaa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.610581] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Created folder: Instances in parent group-v721540. [ 613.610581] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 613.613433] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 613.613433] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-913e32ee-d8e0-4177-af34-4e31282ed408 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.633678] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 613.633678] env[70020]: value = "task-3617651" [ 613.633678] env[70020]: _type = "Task" [ 613.633678] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.637314] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 613.647884] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617651, 'name': CreateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.871906] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617647, 'name': Rename_Task, 'duration_secs': 0.150258} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.872506] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.872965] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd8606cd-3cc6-4df8-a0ce-359213429487 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.880203] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 613.880203] env[70020]: value = "task-3617652" [ 613.880203] env[70020]: _type = "Task" [ 613.880203] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.888708] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617652, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.928120] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.928546] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 613.931804] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.161s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.936984] env[70020]: INFO nova.compute.claims [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.984836] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617648, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.035833} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.986784] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] File moved {{(pid=70020) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 613.988879] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Cleaning up location [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 613.988879] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleting the datastore file [datastore1] vmware_temp/8c118588-724e-42f8-b390-9eeb9d0ed498 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.990682] env[70020]: DEBUG nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Received event network-vif-plugged-c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 613.990682] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Acquiring lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.990682] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.990682] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.991862] env[70020]: DEBUG nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] No waiting events found dispatching network-vif-plugged-c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 613.991862] env[70020]: WARNING nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Received unexpected event network-vif-plugged-c7c80204-b8b4-46c6-8d93-38d4879119d3 for instance with vm_state building and task_state spawning. [ 613.991862] env[70020]: DEBUG nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Received event network-changed-c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 613.991862] env[70020]: DEBUG nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Refreshing instance network info cache due to event network-changed-c7c80204-b8b4-46c6-8d93-38d4879119d3. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 613.991862] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Acquiring lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.992285] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Acquired lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.992285] env[70020]: DEBUG nova.network.neutron [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Refreshing network info cache for port c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 613.997927] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-236ff3ba-d1f5-4f2a-98ad-2593554a7809 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.006766] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 614.006766] env[70020]: value = "task-3617653" [ 614.006766] env[70020]: _type = "Task" [ 614.006766] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.018555] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.147459] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617651, 'name': CreateVM_Task, 'duration_secs': 0.41129} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.150638] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 614.153686] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.153739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.154333] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 614.155248] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d097c01-c9eb-4d1a-8102-44d480688765 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.162311] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 614.162311] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52aa2f69-1032-b94a-841b-5d43ec5ae942" [ 614.162311] env[70020]: _type = "Task" [ 614.162311] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.167384] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.172455] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52aa2f69-1032-b94a-841b-5d43ec5ae942, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.395551] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617652, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.441049] env[70020]: DEBUG nova.compute.utils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 614.446984] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 614.449452] env[70020]: DEBUG nova.network.neutron [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 614.529890] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029985} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.529890] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.529890] env[70020]: DEBUG nova.policy [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64934efd425e4b5da45bb75d2bf74a96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '85abf8ca8009465c87e931b0e9d0fe96', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 614.529890] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eeb1f51-53ca-4eba-afa6-3446548a95e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.540744] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 614.540744] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522d5fb0-dfc7-23b0-e00c-869c7b35fe96" [ 614.540744] env[70020]: _type = "Task" [ 614.540744] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.552309] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d5fb0-dfc7-23b0-e00c-869c7b35fe96, 'name': SearchDatastore_Task, 'duration_secs': 0.011267} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.552594] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.552896] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 516341a3-2230-4340-a1e0-ff97bb7a608d/516341a3-2230-4340-a1e0-ff97bb7a608d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.553201] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.553423] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 614.553675] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12ca4be6-03a0-428f-93c6-b895afb95e21 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.561110] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3267e761-b177-4d99-bc00-17d6509a2d9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.564964] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 614.564964] env[70020]: value = "task-3617654" [ 614.564964] env[70020]: _type = "Task" [ 614.564964] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.572074] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 614.572280] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 614.573406] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d61e754-7299-4dfa-86aa-b49feda279e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.581213] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.584314] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 614.584314] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52595b52-9206-8647-c4b3-8a8904029c5a" [ 614.584314] env[70020]: _type = "Task" [ 614.584314] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.594934] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52595b52-9206-8647-c4b3-8a8904029c5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.678873] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52aa2f69-1032-b94a-841b-5d43ec5ae942, 'name': SearchDatastore_Task, 'duration_secs': 0.013777} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.679708] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.681831] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.681831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.688184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "79d98176-b566-4349-ad10-c2ea6fdbc657" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.688685] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.788583] env[70020]: DEBUG nova.network.neutron [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Updated VIF entry in instance network info cache for port 9ead2454-433c-40e4-962b-8e43443376bb. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 614.788692] env[70020]: DEBUG nova.network.neutron [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Updating instance_info_cache with network_info: [{"id": "9ead2454-433c-40e4-962b-8e43443376bb", "address": "fa:16:3e:53:8e:9e", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ead2454-43", "ovs_interfaceid": "9ead2454-433c-40e4-962b-8e43443376bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.832942] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "01773af2-4ce2-4d2a-b334-ab99348000a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.833454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.835560] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "01773af2-4ce2-4d2a-b334-ab99348000a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.835560] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.835560] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.840478] env[70020]: INFO nova.compute.manager [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Terminating instance [ 614.898681] env[70020]: DEBUG oslo_vmware.api [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617652, 'name': PowerOnVM_Task, 'duration_secs': 0.69065} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.902234] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.902234] env[70020]: INFO nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Took 13.95 seconds to spawn the instance on the hypervisor. [ 614.902234] env[70020]: DEBUG nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.903096] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8693db-99d1-4917-bf63-6dea33be8a9b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.951028] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 615.076233] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617654, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.104637] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52595b52-9206-8647-c4b3-8a8904029c5a, 'name': SearchDatastore_Task, 'duration_secs': 0.01082} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.105351] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3a849b9-bd60-406f-a971-53f89fec53c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.115613] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 615.115613] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52499052-a7d8-36e7-0147-c8e46e8ae88c" [ 615.115613] env[70020]: _type = "Task" [ 615.115613] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.125545] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52499052-a7d8-36e7-0147-c8e46e8ae88c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.196175] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 615.218148] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c69108c-04ee-4719-b585-b0ad1b79828e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.229021] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad84bb23-93da-441b-8750-b1be96510f2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.258576] env[70020]: DEBUG nova.network.neutron [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Updated VIF entry in instance network info cache for port c7c80204-b8b4-46c6-8d93-38d4879119d3. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 615.259022] env[70020]: DEBUG nova.network.neutron [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Updating instance_info_cache with network_info: [{"id": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "address": "fa:16:3e:20:f4:6e", "network": {"id": "e0413f92-dcf6-413d-b61f-14b064f9a1d8", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-493233975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3454ca4e376d4017891025c3a36cebf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c80204-b8", "ovs_interfaceid": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.261337] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7955d35-9c50-41d1-b308-072c2209587e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.270201] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afc65bc-a164-45ff-af39-858639bdde35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.286472] env[70020]: DEBUG nova.compute.provider_tree [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.294188] env[70020]: DEBUG oslo_concurrency.lockutils [req-0090b780-65a5-4000-8e42-8194f338ba48 req-c9667aad-9675-4c7a-a826-f89003a3a09d service nova] Releasing lock "refresh_cache-81d5a1b4-1398-4fca-b500-aa2a3dc41494" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.344590] env[70020]: DEBUG nova.compute.manager [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 615.344759] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 615.345898] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16223ce5-f239-4a75-9935-8b772d6d8a84 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.354500] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 615.354719] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-818537ac-44b7-4ab0-bf69-046c3314b3f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.361037] env[70020]: DEBUG oslo_vmware.api [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 615.361037] env[70020]: value = "task-3617655" [ 615.361037] env[70020]: _type = "Task" [ 615.361037] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.369515] env[70020]: DEBUG oslo_vmware.api [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617655, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.425325] env[70020]: INFO nova.compute.manager [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Took 22.61 seconds to build instance. [ 615.587136] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611302} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.587472] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 516341a3-2230-4340-a1e0-ff97bb7a608d/516341a3-2230-4340-a1e0-ff97bb7a608d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 615.588280] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.588280] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08ea6f8d-f146-4dcd-8b3f-131dec683dd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.597276] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 615.597276] env[70020]: value = "task-3617656" [ 615.597276] env[70020]: _type = "Task" [ 615.597276] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.611184] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617656, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.630340] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52499052-a7d8-36e7-0147-c8e46e8ae88c, 'name': SearchDatastore_Task, 'duration_secs': 0.028474} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.630340] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.630948] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 615.630948] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.631159] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.631408] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c6719e4-8d49-4d1d-96d9-02d6cee6845b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.640019] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a2fea57-6b02-4e86-87dd-c25c8bd59c33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.648727] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 615.648727] env[70020]: value = "task-3617657" [ 615.648727] env[70020]: _type = "Task" [ 615.648727] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.654496] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.654720] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 615.657430] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd1968f5-e4da-4e5f-bae9-e5e6c74e5bfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.661734] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.668350] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 615.668350] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52eb1c8c-3054-abb2-57f8-ae8ed9d3c365" [ 615.668350] env[70020]: _type = "Task" [ 615.668350] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.679416] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52eb1c8c-3054-abb2-57f8-ae8ed9d3c365, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.732035] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.737246] env[70020]: DEBUG nova.network.neutron [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Successfully created port: 3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.765374] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Releasing lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.769304] env[70020]: DEBUG nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Received event network-vif-plugged-27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.769622] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.769895] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.771372] env[70020]: DEBUG oslo_concurrency.lockutils [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.771372] env[70020]: DEBUG nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] No waiting events found dispatching network-vif-plugged-27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 615.771372] env[70020]: WARNING nova.compute.manager [req-8591d1d2-0747-49c0-b0d0-ce63f481cc2b req-aae1078a-b546-4197-b187-87bf8863ae6f service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Received unexpected event network-vif-plugged-27c6992d-5e25-418c-83e7-a49ce44dee0e for instance with vm_state building and task_state spawning. [ 615.789356] env[70020]: DEBUG nova.scheduler.client.report [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.879280] env[70020]: DEBUG oslo_vmware.api [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617655, 'name': PowerOffVM_Task, 'duration_secs': 0.198849} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.879612] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 615.879718] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 615.879967] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1473d580-8fb8-4ce4-a120-65594d2debde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.928444] env[70020]: DEBUG oslo_concurrency.lockutils [None req-899acf86-2441-4e54-b11c-42492d548d8d tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.129s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.949645] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 615.949963] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 615.950610] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Deleting the datastore file [datastore2] 01773af2-4ce2-4d2a-b334-ab99348000a5 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 615.950989] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fef9305-0300-46a4-96f7-b2ee0b5ffe22 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.962665] env[70020]: DEBUG oslo_vmware.api [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for the task: (returnval){ [ 615.962665] env[70020]: value = "task-3617659" [ 615.962665] env[70020]: _type = "Task" [ 615.962665] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.975027] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 615.988472] env[70020]: DEBUG oslo_vmware.api [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.010108] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 616.010242] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.010428] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 616.011037] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.011037] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 616.011037] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 616.012080] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 616.012080] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 616.012080] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 616.012309] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 616.012705] env[70020]: DEBUG nova.virt.hardware [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 616.013664] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c953b3e-f880-4a44-9db9-0cb796999542 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.023874] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec7cd83-14e0-400e-883d-88c0f6ff1e85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.108203] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617656, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067782} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.108586] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.109442] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf95537-1c20-4785-990d-db8bce4946a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.139938] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 516341a3-2230-4340-a1e0-ff97bb7a608d/516341a3-2230-4340-a1e0-ff97bb7a608d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.140265] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccaab4c4-dfee-41be-9435-4c7f7d284340 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.169755] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479667} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.175242] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 616.175242] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 616.175242] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 616.175242] env[70020]: value = "task-3617660" [ 616.175242] env[70020]: _type = "Task" [ 616.175242] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.176400] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79c34721-efde-463c-be89-aebc21132ddf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.187462] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52eb1c8c-3054-abb2-57f8-ae8ed9d3c365, 'name': SearchDatastore_Task, 'duration_secs': 0.009728} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.190201] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91022a24-8bcb-4ba2-8562-8f43ff2d14b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.193439] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 616.193439] env[70020]: value = "task-3617661" [ 616.193439] env[70020]: _type = "Task" [ 616.193439] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.200681] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 616.200681] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b86988-e228-c0ef-55f2-95f3c7dcad03" [ 616.200681] env[70020]: _type = "Task" [ 616.200681] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.207309] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617660, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.212975] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617661, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.218210] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b86988-e228-c0ef-55f2-95f3c7dcad03, 'name': SearchDatastore_Task, 'duration_secs': 0.010554} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.218454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.218742] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 6a114dce-7ed3-46e1-9d50-c3dd6efd340c/6a114dce-7ed3-46e1-9d50-c3dd6efd340c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 616.218972] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.219163] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 616.219406] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01f69d8b-4ead-4d42-a008-a600e1d94e18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.221914] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c98f6a28-9a68-47d0-92c2-b440cded40a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.227679] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 616.227679] env[70020]: value = "task-3617662" [ 616.227679] env[70020]: _type = "Task" [ 616.227679] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.232265] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 616.232427] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 616.236321] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9552038e-5af3-4a99-9b92-e06f742e4e3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.238961] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617662, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.242094] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 616.242094] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52de07ca-808e-5d1e-6450-ffa75276423a" [ 616.242094] env[70020]: _type = "Task" [ 616.242094] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.251427] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52de07ca-808e-5d1e-6450-ffa75276423a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.299343] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.299343] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 616.302454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.135s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.304619] env[70020]: INFO nova.compute.claims [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.333025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "ea97f6ab-057e-44d3-835a-68b46d241621" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.333025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "ea97f6ab-057e-44d3-835a-68b46d241621" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.337430] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "d601179a-df77-4f2e-b8df-9185b8a485e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.337816] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.475098] env[70020]: DEBUG oslo_vmware.api [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Task: {'id': task-3617659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235783} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.475302] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 616.475486] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 616.475653] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 616.475820] env[70020]: INFO nova.compute.manager [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 616.476070] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.476269] env[70020]: DEBUG nova.compute.manager [-] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 616.476378] env[70020]: DEBUG nova.network.neutron [-] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 616.692330] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617660, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.705925] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617661, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068484} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.706089] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.706892] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af541243-9adb-48ad-9ca6-2fe7e2c82bf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.732542] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.733119] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b33b4f7-4058-497c-8ce1-518b39a7d05b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.760977] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524313} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.765613] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 6a114dce-7ed3-46e1-9d50-c3dd6efd340c/6a114dce-7ed3-46e1-9d50-c3dd6efd340c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 616.765904] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 616.766276] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 616.766276] env[70020]: value = "task-3617663" [ 616.766276] env[70020]: _type = "Task" [ 616.766276] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.766477] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52de07ca-808e-5d1e-6450-ffa75276423a, 'name': SearchDatastore_Task, 'duration_secs': 0.011447} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.766775] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29469396-36e7-4ca3-8920-1ffd2e2ab984 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.772837] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa56dc4-b780-49cd-8b9c-e30a8eb5b5ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.780366] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617663, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.782664] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 616.782664] env[70020]: value = "task-3617664" [ 616.782664] env[70020]: _type = "Task" [ 616.782664] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.782930] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 616.782930] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d4e78c-58d9-5f2e-15c9-b69b8bfade50" [ 616.782930] env[70020]: _type = "Task" [ 616.782930] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.795495] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617664, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.800203] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d4e78c-58d9-5f2e-15c9-b69b8bfade50, 'name': SearchDatastore_Task, 'duration_secs': 0.009537} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.800203] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.800203] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1f95bfa8-bc97-4ed7-8c33-c00297430bf5/1f95bfa8-bc97-4ed7-8c33-c00297430bf5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 616.800203] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afbd6f60-d639-4a6a-bbd6-20fad5f20308 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.805183] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 616.805183] env[70020]: value = "task-3617665" [ 616.805183] env[70020]: _type = "Task" [ 616.805183] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.810327] env[70020]: DEBUG nova.compute.utils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 616.813978] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 616.814221] env[70020]: DEBUG nova.network.neutron [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 616.821490] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.837838] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.840610] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.937869] env[70020]: DEBUG nova.policy [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01af71591d00469fb2a87c105e083644', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a63e1e3fd80f4c9a89e43f01aeba54bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 617.195283] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617660, 'name': ReconfigVM_Task, 'duration_secs': 0.543618} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.197658] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 516341a3-2230-4340-a1e0-ff97bb7a608d/516341a3-2230-4340-a1e0-ff97bb7a608d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 617.199615] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3586d4bf-a383-4b61-a0d7-ed06af643c56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.207075] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 617.207075] env[70020]: value = "task-3617666" [ 617.207075] env[70020]: _type = "Task" [ 617.207075] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.218818] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617666, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.281372] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617663, 'name': ReconfigVM_Task, 'duration_secs': 0.454628} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.282101] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Reconfigured VM instance instance-00000007 to attach disk [datastore1] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 617.282697] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a084c57-2aef-471f-94de-162d3912df27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.297865] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 617.297865] env[70020]: value = "task-3617667" [ 617.297865] env[70020]: _type = "Task" [ 617.297865] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.297865] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617664, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084409} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.297865] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 617.304576] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9fc295-1148-41a5-a219-b25130f6cc5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.314016] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617667, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.326587] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 617.346150] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 6a114dce-7ed3-46e1-9d50-c3dd6efd340c/6a114dce-7ed3-46e1-9d50-c3dd6efd340c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 617.360046] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d02353d-2f30-4176-922c-e4c0701468be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.379201] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465255} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.385176] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1f95bfa8-bc97-4ed7-8c33-c00297430bf5/1f95bfa8-bc97-4ed7-8c33-c00297430bf5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 617.385411] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 617.386727] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a04514fc-2fcb-4bce-8429-9b5f883312f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.392056] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 617.392056] env[70020]: value = "task-3617668" [ 617.392056] env[70020]: _type = "Task" [ 617.392056] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.398585] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 617.398585] env[70020]: value = "task-3617669" [ 617.398585] env[70020]: _type = "Task" [ 617.398585] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.405835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.409956] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617668, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.415253] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.435028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.601040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061ae64b-a393-44c7-9aa8-37ccb27c4bd4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.608624] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cad9480-2f69-446d-a3ea-30f3ab690d7d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.642469] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6010bd69-87b3-44eb-93e6-8b3c894c1223 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.650223] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197f91ca-34a6-445f-b67a-36f6ca9afb2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.665323] env[70020]: DEBUG nova.compute.provider_tree [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.716760] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617666, 'name': Rename_Task, 'duration_secs': 0.171153} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.716914] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.717077] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b811318-d2e6-4e61-85f6-7aad2f1bcd17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.723782] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 617.723782] env[70020]: value = "task-3617670" [ 617.723782] env[70020]: _type = "Task" [ 617.723782] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.731701] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.810586] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617667, 'name': Rename_Task, 'duration_secs': 0.167576} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.810586] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.810586] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9094535-f72c-4405-80d5-46988a651d6a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.814458] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 617.814458] env[70020]: value = "task-3617671" [ 617.814458] env[70020]: _type = "Task" [ 617.814458] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.823797] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617671, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.824922] env[70020]: DEBUG nova.network.neutron [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Successfully created port: d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.907770] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617668, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.917157] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070202} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.917426] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 617.918228] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf99dea-2541-4cb6-8570-ac56befc2ed8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.946678] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 1f95bfa8-bc97-4ed7-8c33-c00297430bf5/1f95bfa8-bc97-4ed7-8c33-c00297430bf5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 617.947078] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-832f49fc-d262-474a-993f-6c0e2c3e099c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.972066] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 617.972066] env[70020]: value = "task-3617672" [ 617.972066] env[70020]: _type = "Task" [ 617.972066] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.981702] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617672, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.043742] env[70020]: DEBUG nova.network.neutron [-] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.169242] env[70020]: DEBUG nova.scheduler.client.report [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 618.239017] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617670, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.326345] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617671, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.360659] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 618.390415] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 618.390415] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.390415] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.390773] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.390773] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.390773] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 618.390773] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 618.390773] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 618.390945] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 618.391090] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 618.391231] env[70020]: DEBUG nova.virt.hardware [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 618.392206] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bf1dd9-5460-4dfc-82ab-bb55106c8d1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.411286] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26897ab-df09-4060-8ea4-776e5bcdd8d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.415336] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617668, 'name': ReconfigVM_Task, 'duration_secs': 0.625963} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.415504] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 6a114dce-7ed3-46e1-9d50-c3dd6efd340c/6a114dce-7ed3-46e1-9d50-c3dd6efd340c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 618.417064] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-980a9aec-351e-4cae-9886-7c8de09c9c58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.430563] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 618.430563] env[70020]: value = "task-3617673" [ 618.430563] env[70020]: _type = "Task" [ 618.430563] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.439442] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617673, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.483993] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.547124] env[70020]: INFO nova.compute.manager [-] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Took 2.07 seconds to deallocate network for instance. [ 618.675618] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.676153] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 618.679087] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.947s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.680615] env[70020]: INFO nova.compute.claims [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.739762] env[70020]: DEBUG oslo_vmware.api [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617670, 'name': PowerOnVM_Task, 'duration_secs': 0.516503} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.740057] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 618.740253] env[70020]: INFO nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Took 15.47 seconds to spawn the instance on the hypervisor. [ 618.740450] env[70020]: DEBUG nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 618.741253] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6729a57-970a-4824-a141-535b4f0959cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.791041] env[70020]: DEBUG nova.network.neutron [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Successfully updated port: 3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 618.826389] env[70020]: DEBUG oslo_vmware.api [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617671, 'name': PowerOnVM_Task, 'duration_secs': 0.515591} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.826674] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 618.826869] env[70020]: INFO nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Took 8.71 seconds to spawn the instance on the hypervisor. 
[ 618.827214] env[70020]: DEBUG nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 618.827996] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a7499e-2973-4ee0-ae45-66d196db11fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.941795] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617673, 'name': Rename_Task, 'duration_secs': 0.346661} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.941992] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 618.942324] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6abe2097-1806-4df2-a23e-789e085b31c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.950061] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 618.950061] env[70020]: value = "task-3617674" [ 618.950061] env[70020]: _type = "Task" [ 618.950061] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.963539] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617674, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.982681] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617672, 'name': ReconfigVM_Task, 'duration_secs': 0.990626} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.982945] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 1f95bfa8-bc97-4ed7-8c33-c00297430bf5/1f95bfa8-bc97-4ed7-8c33-c00297430bf5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 618.984890] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfda9e82-e78b-49b8-9912-ab9a69e96191 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.991246] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 618.991246] env[70020]: value = "task-3617675" [ 618.991246] env[70020]: _type = "Task" [ 618.991246] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.999621] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617675, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.058406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.185140] env[70020]: DEBUG nova.compute.utils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 619.189213] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 619.189427] env[70020]: DEBUG nova.network.neutron [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 619.261077] env[70020]: INFO nova.compute.manager [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Took 25.19 seconds to build instance. 
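The "Acquiring lock ...", "Lock ... acquired ... waited N.NNNs" and "Lock ... released ... held N.NNNs" lines (for example the compute_resources lock taken by the resource tracker above) are emitted by oslo.concurrency's lockutils wrappers. The sketch below shows those primitives in isolation; the lock-file prefix, function names and printed messages are illustrative, not Nova code, though the instance UUID is one that appears in this log.

# Sketch of the lockutils pattern behind the "Acquiring lock ... /
# acquired ... waited / released ... held" messages above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources', 'example-')
def claim_resources(instance_uuid):
    # Runs with the named lock held; the wrapper logs how long callers
    # waited for the lock and how long it was held.
    print('claiming resources for %s' % instance_uuid)


def deallocate(instance_uuid):
    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock('compute_resources', 'example-'):
        print('releasing resources for %s' % instance_uuid)


if __name__ == '__main__':
    claim_resources('a0b4a0b0-748d-46eb-9e39-3f21e394c090')
    deallocate('a0b4a0b0-748d-46eb-9e39-3f21e394c090')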
[ 619.294301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.294301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquired lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.294301] env[70020]: DEBUG nova.network.neutron [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.301132] env[70020]: DEBUG nova.policy [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4bcec58841942e29278f2b152add2d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6039c5c57424a98bbbc8c0f38e9741f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 619.350987] env[70020]: INFO nova.compute.manager [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Took 18.42 seconds to build instance. [ 619.377703] env[70020]: DEBUG nova.network.neutron [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Successfully updated port: d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 619.462879] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617674, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.503113] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617675, 'name': Rename_Task, 'duration_secs': 0.207723} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.503530] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 619.503871] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87769891-6e9e-4028-b68d-c8e687b3a5f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.512384] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 619.512384] env[70020]: value = "task-3617676" [ 619.512384] env[70020]: _type = "Task" [ 619.512384] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.524413] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617676, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.697577] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 619.764530] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5390227a-79e2-41ca-8a7c-d15c6cd62c86 tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.698s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.847339] env[70020]: DEBUG nova.network.neutron [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.860740] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb49285b-ccc1-4327-a8f7-01885710d2ec tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "b0b825d4-534d-4d54-a0c4-b9e507726c47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.956s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.881112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "refresh_cache-372e5569-8824-4841-b3d6-4b07423c7b3d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.881112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquired lock "refresh_cache-372e5569-8824-4841-b3d6-4b07423c7b3d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.881112] env[70020]: DEBUG nova.network.neutron [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.936988] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11597ad2-4287-4adf-b18f-d0a8163220cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.948289] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f64c71-e191-415e-ae2e-27bbd6626ff6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.984939] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37097595-628d-42eb-8a02-235de005df2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.990578] env[70020]: DEBUG oslo_vmware.api [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617674, 'name': PowerOnVM_Task, 'duration_secs': 0.737233} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.991122] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.991333] env[70020]: INFO nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Took 14.47 seconds to spawn the instance on the hypervisor. [ 619.991528] env[70020]: DEBUG nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.992262] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31ffa59-dc5c-457b-9036-262eee1c24f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.999361] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16416363-e0b1-4d00-a843-3b05316c7e23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.017974] env[70020]: DEBUG nova.compute.provider_tree [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.028734] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617676, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.030420] env[70020]: DEBUG nova.network.neutron [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updating instance_info_cache with network_info: [{"id": "3af5d84e-e814-4689-aa70-e63d58041799", "address": "fa:16:3e:05:20:3a", "network": {"id": "b4313295-7611-4fc1-b8ba-667ae1e29303", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1683239800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85abf8ca8009465c87e931b0e9d0fe96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5d84e-e8", "ovs_interfaceid": "3af5d84e-e814-4689-aa70-e63d58041799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.143543] env[70020]: DEBUG nova.compute.manager [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Received event network-changed-27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 620.143742] env[70020]: DEBUG nova.compute.manager [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Refreshing instance network info cache due to event network-changed-27c6992d-5e25-418c-83e7-a49ce44dee0e. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 620.144716] env[70020]: DEBUG oslo_concurrency.lockutils [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] Acquiring lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.144716] env[70020]: DEBUG oslo_concurrency.lockutils [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] Acquired lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.144716] env[70020]: DEBUG nova.network.neutron [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Refreshing network info cache for port 27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.152892] env[70020]: DEBUG nova.network.neutron [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Successfully created port: 505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.417990] env[70020]: DEBUG nova.network.neutron [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.523154] env[70020]: INFO nova.compute.manager [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Took 23.04 seconds to build instance. 
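Every entry in this log has the same shape: a relative timestamp in seconds, an env[pid] tag, a level, a logger name, a request context in brackets, an optional "[instance: UUID]" marker, the message, and usually a trailing "{{(pid=NNN) function path:line}}" origin marker. The rough parser below is inferred only from the sample entries in this section; the regular expression and the parse_entry helper are assumptions for illustration, not an official log format.

# Rough parser for entries of the shape seen in this log; the regex is
# inferred from the sample lines above, not an official format.
import re

ENTRY_RE = re.compile(
    r'\[\s*(?P<ts>\d+\.\d+)\]\s+'          # relative timestamp
    r'env\[(?P<env>\d+)\]:\s+'             # environment / process tag
    r'(?P<level>DEBUG|INFO|WARNING|ERROR)\s+'
    r'(?P<logger>\S+)\s+'                  # logger name
    r'\[(?P<context>[^\]]*)\]\s+'          # request context
    r'(?:\[instance:\s*(?P<instance>[0-9a-f-]+)\]\s+)?'
    r'(?P<message>.*?)'                    # free-form message
    r'(?:\s+\{\{\(pid=(?P<pid>\d+)\)\s+(?P<origin>[^}]+)\}\})?\s*$')


def parse_entry(line):
    """Return a dict of fields for one log entry, or None if it doesn't match."""
    match = ENTRY_RE.match(line.strip())
    return match.groupdict() if match else None


if __name__ == '__main__':
    sample = ('[ 620.523154] env[70020]: INFO nova.compute.manager '
              '[None req-59106c2f-8193-40dc-920f-59571ab8a6e8 '
              'tempest-ServersTestManualDisk-1313496142 '
              'tempest-ServersTestManualDisk-1313496142-project-member] '
              '[instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] '
              'Took 23.04 seconds to build instance.')
    print(parse_entry(sample))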
[ 620.528115] env[70020]: DEBUG nova.scheduler.client.report [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.538042] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Releasing lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.538358] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Instance network_info: |[{"id": "3af5d84e-e814-4689-aa70-e63d58041799", "address": "fa:16:3e:05:20:3a", "network": {"id": "b4313295-7611-4fc1-b8ba-667ae1e29303", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1683239800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85abf8ca8009465c87e931b0e9d0fe96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5d84e-e8", "ovs_interfaceid": "3af5d84e-e814-4689-aa70-e63d58041799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 620.538712] env[70020]: DEBUG oslo_vmware.api [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617676, 'name': PowerOnVM_Task, 'duration_secs': 0.656321} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.538964] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:20:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3af5d84e-e814-4689-aa70-e63d58041799', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.546827] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Creating folder: Project (85abf8ca8009465c87e931b0e9d0fe96). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.546827] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 620.546827] env[70020]: INFO nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Took 12.76 seconds to spawn the instance on the hypervisor. [ 620.546988] env[70020]: DEBUG nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 620.547256] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3686135a-22eb-43b1-839b-7e3ddca9830d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.549944] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52d570e-df2d-4ade-b6ce-4a056a91b126 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.564475] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Created folder: Project (85abf8ca8009465c87e931b0e9d0fe96) in parent group-v721521. [ 620.564673] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Creating folder: Instances. Parent ref: group-v721543. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.565329] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11fc9b7d-31fd-446d-8739-0fb0563e0a40 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.574429] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Created folder: Instances in parent group-v721543. [ 620.574676] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.574865] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 620.575093] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a153bfa9-65a4-4d2a-8f36-050d2e1459ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.596207] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.596207] env[70020]: value = "task-3617679" [ 620.596207] env[70020]: _type = "Task" [ 620.596207] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.597146] env[70020]: DEBUG nova.network.neutron [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Updating instance_info_cache with network_info: [{"id": "d435ef58-a090-4c51-b69d-6bafa2b6ff27", "address": "fa:16:3e:8e:9f:a9", "network": {"id": "2517afa6-97dc-411e-8146-3238342a5322", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1596315734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e1e3fd80f4c9a89e43f01aeba54bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd435ef58-a0", "ovs_interfaceid": "d435ef58-a090-4c51-b69d-6bafa2b6ff27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.606719] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617679, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.713421] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 620.746657] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "bc57657e-99e8-46b8-9731-ddd4864a3114" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.746885] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.752543] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 620.752916] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.753178] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 620.753415] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.753572] env[70020]: DEBUG 
nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 620.753735] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 620.754011] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 620.754266] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 620.754463] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 620.754685] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 620.754943] env[70020]: DEBUG nova.virt.hardware [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 620.756207] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42dd2049-7f65-4b16-98ab-300b09f427bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.765939] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d6eee-4f9a-4d95-8aa6-f2b88bd73386 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.866113] env[70020]: DEBUG nova.network.neutron [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updated VIF entry in instance network info cache for port 27c6992d-5e25-418c-83e7-a49ce44dee0e. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.866506] env[70020]: DEBUG nova.network.neutron [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updating instance_info_cache with network_info: [{"id": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "address": "fa:16:3e:d7:26:c6", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c6992d-5e", "ovs_interfaceid": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.033911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-59106c2f-8193-40dc-920f-59571ab8a6e8 tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.562s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.034656] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.035150] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 621.041142] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.634s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.041515] env[70020]: INFO nova.compute.claims [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.078708] env[70020]: INFO nova.compute.manager [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Took 21.42 seconds to build instance. [ 621.105538] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Releasing lock "refresh_cache-372e5569-8824-4841-b3d6-4b07423c7b3d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.105538] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Instance network_info: |[{"id": "d435ef58-a090-4c51-b69d-6bafa2b6ff27", "address": "fa:16:3e:8e:9f:a9", "network": {"id": "2517afa6-97dc-411e-8146-3238342a5322", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1596315734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e1e3fd80f4c9a89e43f01aeba54bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd435ef58-a0", "ovs_interfaceid": "d435ef58-a090-4c51-b69d-6bafa2b6ff27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 621.105780] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:9f:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd435ef58-a090-4c51-b69d-6bafa2b6ff27', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.113538] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Creating folder: Project (a63e1e3fd80f4c9a89e43f01aeba54bf). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.114561] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6eb95ec7-c23a-4a89-989f-daf5154d20ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.120145] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617679, 'name': CreateVM_Task, 'duration_secs': 0.373804} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.120760] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 621.121448] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.121618] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.122270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 621.122270] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da9b5876-5c90-42de-a0a4-40db2bdb4ddb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.128052] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 621.128052] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52054272-31b5-1fc1-4c19-81e8bba5ff19" [ 621.128052] env[70020]: _type = "Task" [ 621.128052] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.129613] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Created folder: Project (a63e1e3fd80f4c9a89e43f01aeba54bf) in parent group-v721521. [ 621.130757] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Creating folder: Instances. Parent ref: group-v721546. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.133841] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e73e396-5a1f-4420-8ac0-47729c235b05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.140517] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52054272-31b5-1fc1-4c19-81e8bba5ff19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.143540] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Created folder: Instances in parent group-v721546. [ 621.143540] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 621.143634] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.143806] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b18ab5ad-7bf9-4bf7-9316-adc8bdd75cc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.166910] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.166910] env[70020]: value = "task-3617682" [ 621.166910] env[70020]: _type = "Task" [ 621.166910] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.175405] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617682, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.260155] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.370187] env[70020]: DEBUG oslo_concurrency.lockutils [req-73d01541-cc10-4ee6-bd3e-ad25b1ade063 req-caebb362-1725-41ba-9973-ec1b90e9d248 service nova] Releasing lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.549103] env[70020]: DEBUG nova.compute.utils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 621.553532] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 621.553725] env[70020]: DEBUG nova.network.neutron [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.584692] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b76d997-0ee8-47b9-8f6e-e34f0fce7250 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.939s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.633432] env[70020]: DEBUG nova.policy [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dfd17283a788463d821296e92ca93ef9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f57b434a8be4f14923fe65d0ed24a72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 621.646906] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52054272-31b5-1fc1-4c19-81e8bba5ff19, 'name': SearchDatastore_Task, 'duration_secs': 0.024386} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.647263] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.647507] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.647735] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.648259] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.648523] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.648812] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f530a72-2dee-4eef-82e7-cab7c1c7c674 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.658959] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.659207] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 621.660129] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86949fac-7262-42e0-957f-21d469f197e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.665918] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 621.665918] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52feb39b-d2e6-879d-6c84-bbef3e8746b2" [ 621.665918] env[70020]: _type = "Task" [ 621.665918] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.683461] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52feb39b-d2e6-879d-6c84-bbef3e8746b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.687259] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617682, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.772489] env[70020]: DEBUG nova.compute.manager [req-53378a55-7697-4762-b99e-321c50bb62fc req-ff9258ce-0207-4001-9f8c-d0fc10c011df service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Received event network-vif-plugged-3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 621.772583] env[70020]: DEBUG oslo_concurrency.lockutils [req-53378a55-7697-4762-b99e-321c50bb62fc req-ff9258ce-0207-4001-9f8c-d0fc10c011df service nova] Acquiring lock "bb4e4986-af2a-4832-9ec7-777bca863dce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.772744] env[70020]: DEBUG oslo_concurrency.lockutils [req-53378a55-7697-4762-b99e-321c50bb62fc req-ff9258ce-0207-4001-9f8c-d0fc10c011df service nova] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.772924] env[70020]: DEBUG oslo_concurrency.lockutils [req-53378a55-7697-4762-b99e-321c50bb62fc req-ff9258ce-0207-4001-9f8c-d0fc10c011df service nova] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.773116] env[70020]: DEBUG nova.compute.manager [req-53378a55-7697-4762-b99e-321c50bb62fc req-ff9258ce-0207-4001-9f8c-d0fc10c011df service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] No waiting events found dispatching network-vif-plugged-3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 621.773279] env[70020]: WARNING 
nova.compute.manager [req-53378a55-7697-4762-b99e-321c50bb62fc req-ff9258ce-0207-4001-9f8c-d0fc10c011df service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Received unexpected event network-vif-plugged-3af5d84e-e814-4689-aa70-e63d58041799 for instance with vm_state building and task_state spawning. [ 621.785864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.977178] env[70020]: DEBUG nova.network.neutron [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Successfully created port: 16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.054266] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 622.183436] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52feb39b-d2e6-879d-6c84-bbef3e8746b2, 'name': SearchDatastore_Task, 'duration_secs': 0.016432} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.186816] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9083ecb-0aff-4e01-8b67-f43986631893 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.194167] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617682, 'name': CreateVM_Task, 'duration_secs': 0.553814} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.194771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.195682] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.195869] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.197010] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 622.200457] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26aa66d7-cdba-4518-9446-c4a79d96aa31 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.200457] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 622.200457] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52309546-53b7-2a74-f2ed-8f9aac7c168a" [ 622.200457] env[70020]: _type = "Task" [ 622.200457] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.208807] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 622.208807] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5255a17e-44e1-fa24-d1f0-7a154d787ec5" [ 622.208807] env[70020]: _type = "Task" [ 622.208807] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.216437] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52309546-53b7-2a74-f2ed-8f9aac7c168a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.227985] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5255a17e-44e1-fa24-d1f0-7a154d787ec5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.233978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.233978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.350680] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cf50d9-5082-45ab-aced-d6d9e7989fd2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.361540] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25465522-6b3a-4c13-8a05-92026f6faf55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.370214] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "d0756709-f17b-441e-b537-df937cfbde84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.370491] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "d0756709-f17b-441e-b537-df937cfbde84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.407050] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.410551] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1029623a-0cae-45af-9310-e84d691febfd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.421733] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23db1d3e-bcec-4c85-829c-0cf81144b5f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.438355] env[70020]: DEBUG nova.compute.provider_tree [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.454127] env[70020]: DEBUG nova.network.neutron [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Successfully updated port: 505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.538921] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.539084] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.539355] env[70020]: DEBUG nova.compute.manager [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.540852] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518c8bdd-f158-45e5-80d0-eb4997fac467 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.548543] env[70020]: DEBUG nova.compute.manager [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 622.549210] env[70020]: DEBUG nova.objects.instance [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lazy-loading 'flavor' on Instance uuid 
81d5a1b4-1398-4fca-b500-aa2a3dc41494 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 622.640837] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Acquiring lock "516341a3-2230-4340-a1e0-ff97bb7a608d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.641176] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.641386] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Acquiring lock "516341a3-2230-4340-a1e0-ff97bb7a608d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.641596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.642357] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.648908] env[70020]: INFO nova.compute.manager [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Terminating instance [ 622.716776] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52309546-53b7-2a74-f2ed-8f9aac7c168a, 'name': SearchDatastore_Task, 'duration_secs': 0.015866} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.717472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.717736] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] bb4e4986-af2a-4832-9ec7-777bca863dce/bb4e4986-af2a-4832-9ec7-777bca863dce.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.717990] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44e24a7b-7898-49b4-9a08-b3b0ef722d09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.723792] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5255a17e-44e1-fa24-d1f0-7a154d787ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.024797} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.724457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.725057] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.725364] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.725525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.725701] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.726287] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d9c9db6-c2cb-46ef-9cfa-b6b684611ff4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.729738] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 622.729738] env[70020]: value = "task-3617683" [ 622.729738] env[70020]: _type = "Task" [ 622.729738] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.735841] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.737639] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 622.739722] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1abf2ee-0a5c-4a96-a373-8ee3150c336c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.743173] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.744770] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.748491] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 622.748491] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f105f3-c5b0-777c-f9da-1709a6c7919a" [ 622.748491] env[70020]: _type = "Task" [ 622.748491] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.758388] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f105f3-c5b0-777c-f9da-1709a6c7919a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.940088] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.941403] env[70020]: DEBUG nova.scheduler.client.report [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.958647] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "refresh_cache-a0b4a0b0-748d-46eb-9e39-3f21e394c090" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.958647] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquired lock "refresh_cache-a0b4a0b0-748d-46eb-9e39-3f21e394c090" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.958647] env[70020]: DEBUG nova.network.neutron [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.064000] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 623.101577] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 623.101930] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.102159] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 623.102350] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.102497] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 623.102844] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 623.102965] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 623.103127] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 623.103567] env[70020]: DEBUG 
nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 623.103815] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 623.104066] env[70020]: DEBUG nova.virt.hardware [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 623.104994] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aee09e8-adab-4db6-b2f6-02a3a422c59c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.117797] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c29768-8096-4635-97f6-41ab66419a73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.160293] env[70020]: INFO nova.compute.manager [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Rebuilding instance [ 623.161182] env[70020]: DEBUG nova.compute.manager [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 623.161747] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.164508] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493e0926-6a44-4c46-a30b-1dc0fec44b41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.174334] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 623.174334] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad3f3a13-1a62-42e3-9d4e-2420cd682cd8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.181077] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Waiting for the task: (returnval){ [ 623.181077] env[70020]: value = "task-3617684" [ 623.181077] env[70020]: _type = "Task" [ 623.181077] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.201231] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Task: {'id': task-3617684, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.219020] env[70020]: DEBUG nova.compute.manager [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 623.219020] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eba8f5-ee19-44db-ae44-2268a08b0c27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.240462] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617683, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.260266] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f105f3-c5b0-777c-f9da-1709a6c7919a, 'name': SearchDatastore_Task, 'duration_secs': 0.010844} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.261310] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4658e935-9842-4153-b54a-5aba2609f440 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.267714] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 623.267714] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ea1fc3-2f8e-507e-a456-e34aa956727b" [ 623.267714] env[70020]: _type = "Task" [ 623.267714] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.273451] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.279813] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ea1fc3-2f8e-507e-a456-e34aa956727b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.447875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.448704] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 623.453661] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.016s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.453661] env[70020]: INFO nova.compute.claims [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.501593] env[70020]: DEBUG nova.network.neutron [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Successfully updated port: 16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.557380] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 623.557717] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23be1fd1-55f9-49f4-b7c9-73090ff4277b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.565792] env[70020]: DEBUG oslo_vmware.api [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 623.565792] env[70020]: value = "task-3617685" [ 623.565792] env[70020]: _type = "Task" [ 623.565792] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.575247] env[70020]: DEBUG oslo_vmware.api [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.575731] env[70020]: DEBUG nova.network.neutron [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.693549] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Task: {'id': task-3617684, 'name': PowerOffVM_Task, 'duration_secs': 0.254061} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.694013] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.694013] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.695214] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b246fa2-d591-4a51-83e2-b08253e4aa11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.709893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "4b5750d4-98ec-4c70-b214-fad97060b606" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.710130] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.741597] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511585} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.741879] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] bb4e4986-af2a-4832-9ec7-777bca863dce/bb4e4986-af2a-4832-9ec7-777bca863dce.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.742242] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.742509] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6a810d6-5ef5-4b78-97c5-1897b83b4490 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.749654] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 623.749654] env[70020]: value = "task-3617687" [ 623.749654] env[70020]: _type = "Task" [ 623.749654] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.757843] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617687, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.773463] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.773681] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.773939] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Deleting the datastore file [datastore1] 516341a3-2230-4340-a1e0-ff97bb7a608d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.774514] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-054ef886-7524-48c4-9c3d-c75d8a9b0669 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.780817] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ea1fc3-2f8e-507e-a456-e34aa956727b, 'name': SearchDatastore_Task, 'duration_secs': 0.017493} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.781023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.781307] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 372e5569-8824-4841-b3d6-4b07423c7b3d/372e5569-8824-4841-b3d6-4b07423c7b3d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 623.781552] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dba25bfb-d179-4ec0-a33f-32de6c0bed00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.784926] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Waiting for the task: (returnval){ [ 623.784926] env[70020]: value = "task-3617688" [ 623.784926] env[70020]: _type = "Task" [ 623.784926] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.789388] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 623.789388] env[70020]: value = "task-3617689" [ 623.789388] env[70020]: _type = "Task" [ 623.789388] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.795873] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Task: {'id': task-3617688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.800802] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.948328] env[70020]: DEBUG nova.network.neutron [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Updating instance_info_cache with network_info: [{"id": "505b9f3d-c597-4acb-8477-fd64b8ea5de1", "address": "fa:16:3e:0d:e7:c0", "network": {"id": "665d0c23-de7f-46c9-b7d3-0e8705495b1b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2043355770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "b6039c5c57424a98bbbc8c0f38e9741f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505b9f3d-c5", "ovs_interfaceid": "505b9f3d-c597-4acb-8477-fd64b8ea5de1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.961108] env[70020]: DEBUG nova.compute.utils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 623.964855] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 623.964855] env[70020]: DEBUG nova.network.neutron [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 624.004797] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.004981] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquired lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.007359] env[70020]: DEBUG nova.network.neutron [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.044878] env[70020]: DEBUG nova.policy [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41059e43a7644fa4876da5770e24f735', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'add37b0346e74e7f9724e69253e2cffc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 624.081075] env[70020]: DEBUG oslo_vmware.api [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617685, 'name': PowerOffVM_Task, 'duration_secs': 0.360612} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.081544] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 624.081819] env[70020]: DEBUG nova.compute.manager [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 624.082754] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c233ac2-6ae6-4c7d-bfbd-9eaec21c8c30 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.212278] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 624.236279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 624.236629] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43f9ba6d-bab5-4e7f-9d6c-b89f44300cb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.244773] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 624.244773] env[70020]: value = "task-3617690" [ 624.244773] env[70020]: _type = "Task" [ 624.244773] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.258054] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617690, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.263804] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133744} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.264982] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.265207] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df5fb64-bf70-4a26-abc0-0d640124dfa2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.293223] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] bb4e4986-af2a-4832-9ec7-777bca863dce/bb4e4986-af2a-4832-9ec7-777bca863dce.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.295616] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-daeaddd1-0338-49d6-bee2-52cb655c7aa9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.329227] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Task: {'id': task-3617688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.334524] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617689, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.334998] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 624.334998] env[70020]: value = "task-3617691" [ 624.334998] env[70020]: _type = "Task" [ 624.334998] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.345229] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617691, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.453239] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Releasing lock "refresh_cache-a0b4a0b0-748d-46eb-9e39-3f21e394c090" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.453239] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Instance network_info: |[{"id": "505b9f3d-c597-4acb-8477-fd64b8ea5de1", "address": "fa:16:3e:0d:e7:c0", "network": {"id": "665d0c23-de7f-46c9-b7d3-0e8705495b1b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2043355770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "b6039c5c57424a98bbbc8c0f38e9741f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505b9f3d-c5", "ovs_interfaceid": "505b9f3d-c597-4acb-8477-fd64b8ea5de1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 624.453618] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:e7:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '505b9f3d-c597-4acb-8477-fd64b8ea5de1', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.461652] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Creating folder: Project (b6039c5c57424a98bbbc8c0f38e9741f). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.462163] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4459fc1c-a76d-4c91-8309-e44599f6af11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.464796] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 624.479681] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Created folder: Project (b6039c5c57424a98bbbc8c0f38e9741f) in parent group-v721521. [ 624.479880] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Creating folder: Instances. Parent ref: group-v721549. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.480129] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0118ec02-35be-4562-8449-69b1f0efdc83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.489999] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Created folder: Instances in parent group-v721549. [ 624.490266] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.490491] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.490700] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4bf7369-7746-4b3f-ab0e-854fae06fac5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.514160] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.514160] env[70020]: value = "task-3617694" [ 624.514160] env[70020]: _type = "Task" [ 624.514160] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.526150] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617694, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.562296] env[70020]: DEBUG nova.network.neutron [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.598772] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bc4f5e44-bdac-464e-ab67-231ceb3db221 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.612864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.613111] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.620817] env[70020]: DEBUG nova.compute.manager [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Received event network-vif-deleted-eb757fe7-6cda-466e-9979-29e56b057f1c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 624.620959] env[70020]: DEBUG nova.compute.manager [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Received event network-vif-plugged-d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 624.621120] env[70020]: DEBUG oslo_concurrency.lockutils [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] Acquiring lock "372e5569-8824-4841-b3d6-4b07423c7b3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.621319] env[70020]: DEBUG oslo_concurrency.lockutils [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.621511] env[70020]: DEBUG oslo_concurrency.lockutils [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] Lock 
"372e5569-8824-4841-b3d6-4b07423c7b3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.622385] env[70020]: DEBUG nova.compute.manager [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] No waiting events found dispatching network-vif-plugged-d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 624.622385] env[70020]: WARNING nova.compute.manager [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Received unexpected event network-vif-plugged-d435ef58-a090-4c51-b69d-6bafa2b6ff27 for instance with vm_state building and task_state spawning. [ 624.622783] env[70020]: DEBUG nova.compute.manager [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Received event network-changed-d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 624.622783] env[70020]: DEBUG nova.compute.manager [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Refreshing instance network info cache due to event network-changed-d435ef58-a090-4c51-b69d-6bafa2b6ff27. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 624.623057] env[70020]: DEBUG oslo_concurrency.lockutils [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] Acquiring lock "refresh_cache-372e5569-8824-4841-b3d6-4b07423c7b3d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.623217] env[70020]: DEBUG oslo_concurrency.lockutils [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] Acquired lock "refresh_cache-372e5569-8824-4841-b3d6-4b07423c7b3d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.623380] env[70020]: DEBUG nova.network.neutron [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Refreshing network info cache for port d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.664861] env[70020]: DEBUG nova.network.neutron [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Successfully created port: 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.736655] env[70020]: DEBUG nova.network.neutron [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Updating instance_info_cache with network_info: [{"id": "16a8d745-ea55-4e94-9513-0b5547738678", "address": "fa:16:3e:fe:cb:ea", "network": {"id": 
"53830a9c-7c1f-4b18-ad07-2379bc00d366", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1462386900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4f57b434a8be4f14923fe65d0ed24a72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16a8d745-ea", "ovs_interfaceid": "16a8d745-ea55-4e94-9513-0b5547738678", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.740933] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.758112] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617690, 'name': PowerOffVM_Task, 'duration_secs': 0.164512} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.758383] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 624.758609] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.759401] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1c998f-b405-43b1-bdad-9a0bec5b7738 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.767770] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 624.768030] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4bfc847-a44d-420e-b53c-000ff774e698 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.781011] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b98a6a-576f-4532-831e-3a2c4dcfaafa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.788851] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04d103c-1ae0-4591-b0ab-93df203d55bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.808276] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.808643] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.808982] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Deleting the datastore file [datastore1] b0b825d4-534d-4d54-a0c4-b9e507726c47 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.833870] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6c5801f-2d6f-44e5-a90c-0b5eeccbe9ce {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.839420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1439a2-65d1-4219-bbfb-a4c6f239785d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.851471] env[70020]: DEBUG oslo_vmware.api [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Task: {'id': task-3617688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.642254} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.851899] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582071} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.853338] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.853597] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 624.853799] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.853974] env[70020]: INFO nova.compute.manager [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Took 1.69 seconds to destroy the instance on the hypervisor. [ 624.854231] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.854420] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 372e5569-8824-4841-b3d6-4b07423c7b3d/372e5569-8824-4841-b3d6-4b07423c7b3d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.854608] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.855218] env[70020]: DEBUG nova.compute.manager [-] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 624.855325] env[70020]: DEBUG nova.network.neutron [-] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.857378] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed14db3d-cb4d-4067-825e-07eb6f314148 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.865701] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617691, 'name': ReconfigVM_Task, 'duration_secs': 0.516866} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.865923] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 624.865923] env[70020]: value = "task-3617696" [ 624.865923] env[70020]: _type = "Task" [ 624.865923] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.867158] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Reconfigured VM instance instance-00000008 to attach disk [datastore1] bb4e4986-af2a-4832-9ec7-777bca863dce/bb4e4986-af2a-4832-9ec7-777bca863dce.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.870279] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-204cabda-11cd-4f04-95a6-b2bd1adceb2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.877316] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aba8d91-6c62-4b34-af74-76a1772e92e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.882099] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 624.882099] env[70020]: value = "task-3617697" [ 624.882099] env[70020]: _type = "Task" [ 624.882099] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.901230] env[70020]: DEBUG nova.compute.provider_tree [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.902724] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 624.902724] env[70020]: value = "task-3617698" [ 624.902724] env[70020]: _type = "Task" [ 624.902724] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.905863] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.910657] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.916508] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617698, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.024996] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617694, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.169358] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "3501a6fc-f090-4098-8f63-57a97bd61f1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.169358] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.217137] env[70020]: DEBUG nova.compute.manager [None req-37b994e9-d319-4fd4-a224-687a68632213 tempest-ServerDiagnosticsV248Test-998026657 tempest-ServerDiagnosticsV248Test-998026657-project-admin] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 625.218240] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bdc693-b6de-4617-9307-72a1ba27a3d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.226362] env[70020]: INFO nova.compute.manager [None req-37b994e9-d319-4fd4-a224-687a68632213 tempest-ServerDiagnosticsV248Test-998026657 tempest-ServerDiagnosticsV248Test-998026657-project-admin] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Retrieving diagnostics [ 625.228216] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0db47cb-a49e-4ccb-94e4-a91829b13111 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.261186] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Releasing lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.261479] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Instance network_info: |[{"id": "16a8d745-ea55-4e94-9513-0b5547738678", "address": "fa:16:3e:fe:cb:ea", "network": {"id": "53830a9c-7c1f-4b18-ad07-2379bc00d366", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1462386900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4f57b434a8be4f14923fe65d0ed24a72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16a8d745-ea", "ovs_interfaceid": "16a8d745-ea55-4e94-9513-0b5547738678", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 625.265634] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:cb:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16a8d745-ea55-4e94-9513-0b5547738678', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.272766] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Creating folder: Project (4f57b434a8be4f14923fe65d0ed24a72). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.273288] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e429e687-3dc4-451f-8f8e-cc510898b2d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.285272] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Created folder: Project (4f57b434a8be4f14923fe65d0ed24a72) in parent group-v721521. [ 625.285484] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Creating folder: Instances. Parent ref: group-v721552. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.286007] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58898964-0a9e-4ead-881f-3cffbf716cac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.294937] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Created folder: Instances in parent group-v721552. [ 625.295175] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 625.295428] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 625.295535] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7eae123-4aca-4b45-a1f1-e097ab645712 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.316809] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.316809] env[70020]: value = "task-3617701" [ 625.316809] env[70020]: _type = "Task" [ 625.316809] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.324073] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617701, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.377901] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123992} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.378038] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 625.378220] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 625.378435] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.382056] env[70020]: DEBUG nova.network.neutron [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Updated VIF entry in instance network info cache for port d435ef58-a090-4c51-b69d-6bafa2b6ff27. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 625.382389] env[70020]: DEBUG nova.network.neutron [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Updating instance_info_cache with network_info: [{"id": "d435ef58-a090-4c51-b69d-6bafa2b6ff27", "address": "fa:16:3e:8e:9f:a9", "network": {"id": "2517afa6-97dc-411e-8146-3238342a5322", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1596315734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e1e3fd80f4c9a89e43f01aeba54bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd435ef58-a0", "ovs_interfaceid": "d435ef58-a090-4c51-b69d-6bafa2b6ff27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.393155] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070809} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.394312] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.395101] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832a1e9e-89b7-4120-a91b-a183898637c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.412523] env[70020]: DEBUG nova.scheduler.client.report [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.424867] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 372e5569-8824-4841-b3d6-4b07423c7b3d/372e5569-8824-4841-b3d6-4b07423c7b3d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.429029] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-265d4007-30aa-44ee-ba2f-40850217dd5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.449580] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617698, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.450994] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 625.450994] env[70020]: value = "task-3617702" [ 625.450994] env[70020]: _type = "Task" [ 625.450994] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.459249] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617702, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.478012] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 625.513440] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 625.513647] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.513815] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 625.514008] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.514189] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 625.514339] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 625.514569] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 625.514743] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 625.514917] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 625.515101] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 625.515280] env[70020]: DEBUG nova.virt.hardware [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 625.516576] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc312d17-9ff2-4b8b-84da-331d03b1fd8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.531413] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2838cfbf-5702-4242-b48f-83b5e964f9d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.537182] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617694, 'name': CreateVM_Task, 'duration_secs': 0.541981} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.537491] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.539056] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.539376] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.539817] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 625.548437] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d711df7-cefc-4eb7-8ae5-ebcb8d7151db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.553568] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 625.553568] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529893df-a3b2-166b-0b9c-3c726cc09b9b" [ 625.553568] env[70020]: _type = "Task" [ 625.553568] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.561865] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529893df-a3b2-166b-0b9c-3c726cc09b9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.636070] env[70020]: DEBUG nova.network.neutron [-] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.827292] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617701, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.889018] env[70020]: DEBUG oslo_concurrency.lockutils [req-c4f597f0-3f64-4b76-afa6-8a6a82f3d200 req-f46f0915-d9fd-44ce-b9f1-6fd6bf30b6af service nova] Releasing lock "refresh_cache-372e5569-8824-4841-b3d6-4b07423c7b3d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.928385] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.928634] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 625.931022] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617698, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.931375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.875s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.931941] env[70020]: DEBUG nova.objects.instance [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lazy-loading 'resources' on Instance uuid 01773af2-4ce2-4d2a-b334-ab99348000a5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 625.961625] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.007998] env[70020]: DEBUG nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Received event network-changed-3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 626.008377] env[70020]: DEBUG nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Refreshing instance network info cache due to event network-changed-3af5d84e-e814-4689-aa70-e63d58041799. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 626.008599] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Acquiring lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.008738] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Acquired lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.009089] env[70020]: DEBUG nova.network.neutron [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Refreshing network info cache for port 3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 626.067860] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529893df-a3b2-166b-0b9c-3c726cc09b9b, 'name': SearchDatastore_Task, 'duration_secs': 0.009288} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.068275] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.068524] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.068766] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.068911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.069098] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 
tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.069354] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8c9cc9d-7dbb-4d52-a975-80f8537909e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.078474] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.082283] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.083062] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57545468-5542-45d6-afd8-f1d2e718dac6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.089685] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 626.089685] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5264df59-b936-82ba-b4e1-a7da60e4371b" [ 626.089685] env[70020]: _type = "Task" [ 626.089685] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.096784] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5264df59-b936-82ba-b4e1-a7da60e4371b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.139389] env[70020]: INFO nova.compute.manager [-] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Took 1.28 seconds to deallocate network for instance. [ 626.332145] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617701, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.430680] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617698, 'name': Rename_Task, 'duration_secs': 1.263717} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.434504] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 626.434504] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.434725] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.434821] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.434960] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 626.435112] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 626.435311] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 626.435461] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 626.435657] env[70020]: DEBUG nova.virt.hardware [None 
req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 626.436402] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 626.436402] env[70020]: DEBUG nova.virt.hardware [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 626.436402] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.442191] env[70020]: DEBUG nova.compute.utils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 626.446951] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7632d9-5863-4011-b442-8c16ba7ff180 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.452025] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbd73af1-a90d-4198-8dc3-98936575bddf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.455440] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 626.455726] env[70020]: DEBUG nova.network.neutron [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 626.473025] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2571786-b8a4-41ec-a405-a1a1c4a8f87f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.479521] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 626.479521] env[70020]: value = "task-3617703" [ 626.479521] env[70020]: _type = "Task" [ 626.479521] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.479724] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.494503] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.500990] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 626.504822] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.505503] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bed7c7ec-c114-47e5-99f7-c810b09f5ddc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.523180] env[70020]: DEBUG nova.network.neutron [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Successfully updated port: 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.527120] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617703, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.533926] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.533926] env[70020]: value = "task-3617704" [ 626.533926] env[70020]: _type = "Task" [ 626.533926] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.544537] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617704, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.560757] env[70020]: DEBUG nova.policy [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0e22e21d3684201883adc3617ddee72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3a2dc07c1d447ea81ca142d80ab4210', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 626.600473] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5264df59-b936-82ba-b4e1-a7da60e4371b, 'name': SearchDatastore_Task, 'duration_secs': 0.008825} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.607275] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ae7463-a9b9-43b9-87e1-77b285b86147 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.613117] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 626.613117] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d3120b-0156-bb94-4e65-be5c15881a5c" [ 626.613117] env[70020]: _type = "Task" [ 626.613117] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.625021] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d3120b-0156-bb94-4e65-be5c15881a5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.646397] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.807664] env[70020]: DEBUG nova.network.neutron [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updated VIF entry in instance network info cache for port 3af5d84e-e814-4689-aa70-e63d58041799. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.808014] env[70020]: DEBUG nova.network.neutron [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updating instance_info_cache with network_info: [{"id": "3af5d84e-e814-4689-aa70-e63d58041799", "address": "fa:16:3e:05:20:3a", "network": {"id": "b4313295-7611-4fc1-b8ba-667ae1e29303", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1683239800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85abf8ca8009465c87e931b0e9d0fe96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5d84e-e8", "ovs_interfaceid": "3af5d84e-e814-4689-aa70-e63d58041799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.822609] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13c5f81-69ed-4787-b577-d21b50b1c812 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.836666] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab2a78-16e4-4d06-b24c-09a165017bc0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.840557] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617701, 'name': CreateVM_Task, 'duration_secs': 1.348733} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.840557] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 626.843781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.843781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.843781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 626.843781] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b98672-6ec4-4404-ab89-9f24b31ca2ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.871376] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f890366f-95c0-419f-8d5f-dc41a22f4142 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.875729] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 626.875729] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5233012f-bd01-42be-6d59-dfb0e53e3fab" [ 626.875729] env[70020]: _type = "Task" [ 626.875729] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.882664] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12da9bfb-5b9b-429d-af4e-1e345905c29e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.889888] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5233012f-bd01-42be-6d59-dfb0e53e3fab, 'name': SearchDatastore_Task, 'duration_secs': 0.009503} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.890673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.891134] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.891387] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.900865] env[70020]: DEBUG nova.compute.provider_tree [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.953067] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 626.968153] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617702, 'name': ReconfigVM_Task, 'duration_secs': 1.210273} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.968402] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 372e5569-8824-4841-b3d6-4b07423c7b3d/372e5569-8824-4841-b3d6-4b07423c7b3d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 626.969078] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65e59b16-06b8-4f6c-a545-099e39b6fd91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.977275] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 626.977275] env[70020]: value = "task-3617705" [ 626.977275] env[70020]: _type = "Task" [ 626.977275] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.986581] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617705, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.995629] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617703, 'name': PowerOnVM_Task} progress is 81%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.028029] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.028183] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.028328] env[70020]: DEBUG nova.network.neutron [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.045165] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617704, 'name': CreateVM_Task, 'duration_secs': 0.301958} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.045328] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.045733] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.045875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.046348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.046468] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72cec149-e00c-4d92-b3e5-7253ccd0236a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.061359] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 627.061359] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d85f8e-904a-e0ba-a4cb-84bf136c74bd" [ 627.061359] env[70020]: _type = "Task" [ 627.061359] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.076847] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d85f8e-904a-e0ba-a4cb-84bf136c74bd, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.076847] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.076847] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.076847] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.127924] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d3120b-0156-bb94-4e65-be5c15881a5c, 'name': SearchDatastore_Task, 'duration_secs': 0.009939} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.128088] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.128331] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a0b4a0b0-748d-46eb-9e39-3f21e394c090/a0b4a0b0-748d-46eb-9e39-3f21e394c090.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.128608] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.128796] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.129137] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75c4daba-42bb-402d-96b0-c786b3ca7b50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.131352] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5b6898c-6c29-493c-a9cc-bf690ada1128 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.139696] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 627.139696] env[70020]: value = "task-3617706" [ 627.139696] env[70020]: _type = "Task" [ 627.139696] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.149488] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.149676] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.151254] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80709ac2-6973-48ab-b3f5-5f28ed200345 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.159389] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.163913] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 627.163913] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cfcfb1-676c-102a-7e50-5a5a44dcfbe9" [ 627.163913] env[70020]: _type = "Task" [ 627.163913] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.176536] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cfcfb1-676c-102a-7e50-5a5a44dcfbe9, 'name': SearchDatastore_Task, 'duration_secs': 0.009822} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.177548] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-529a9d11-6743-4bcb-9f93-1b6f14b37172 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.184699] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 627.184699] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5236cb09-7809-d3fb-0709-061638ff472f" [ 627.184699] env[70020]: _type = "Task" [ 627.184699] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.195014] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5236cb09-7809-d3fb-0709-061638ff472f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.240849] env[70020]: DEBUG nova.network.neutron [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Successfully created port: 6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.312069] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Releasing lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.312349] env[70020]: DEBUG nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Received event network-vif-plugged-505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.312549] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Acquiring lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.312732] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.312882] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.313052] env[70020]: DEBUG nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] No waiting events found dispatching network-vif-plugged-505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 627.313210] env[70020]: WARNING nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Received unexpected event network-vif-plugged-505b9f3d-c597-4acb-8477-fd64b8ea5de1 for instance with vm_state building and task_state spawning. [ 627.313361] env[70020]: DEBUG nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Received event network-changed-505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.314733] env[70020]: DEBUG nova.compute.manager [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Refreshing instance network info cache due to event network-changed-505b9f3d-c597-4acb-8477-fd64b8ea5de1. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 627.314733] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Acquiring lock "refresh_cache-a0b4a0b0-748d-46eb-9e39-3f21e394c090" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.314733] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Acquired lock "refresh_cache-a0b4a0b0-748d-46eb-9e39-3f21e394c090" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.314733] env[70020]: DEBUG nova.network.neutron [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Refreshing network info cache for port 505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.404069] env[70020]: DEBUG nova.scheduler.client.report [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 627.490058] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: 
{'id': task-3617705, 'name': Rename_Task, 'duration_secs': 0.245895} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.494108] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 627.494470] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8128ef7b-fa90-4843-a1fd-92615e978292 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.504238] env[70020]: DEBUG oslo_vmware.api [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3617703, 'name': PowerOnVM_Task, 'duration_secs': 0.74754} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.506799] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.507078] env[70020]: INFO nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Took 11.53 seconds to spawn the instance on the hypervisor. [ 627.507784] env[70020]: DEBUG nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.507784] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 627.507784] env[70020]: value = "task-3617707" [ 627.507784] env[70020]: _type = "Task" [ 627.507784] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.508398] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522783b2-6861-4cd3-996a-d33724cc50d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.534854] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617707, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.642994] env[70020]: DEBUG nova.network.neutron [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.651680] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "0cc49db6-1574-4e51-8692-b79ee14bc25d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.652435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "0cc49db6-1574-4e51-8692-b79ee14bc25d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.652435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "0cc49db6-1574-4e51-8692-b79ee14bc25d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.652551] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "0cc49db6-1574-4e51-8692-b79ee14bc25d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.652711] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "0cc49db6-1574-4e51-8692-b79ee14bc25d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.658362] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617706, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49925} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.658824] env[70020]: INFO nova.compute.manager [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Terminating instance [ 627.660330] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a0b4a0b0-748d-46eb-9e39-3f21e394c090/a0b4a0b0-748d-46eb-9e39-3f21e394c090.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.660527] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.662303] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e95b078d-c989-478e-ac72-e9899f2aebc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.670160] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 627.670160] env[70020]: value = "task-3617708" [ 627.670160] env[70020]: _type = "Task" [ 627.670160] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.686532] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617708, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.701982] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5236cb09-7809-d3fb-0709-061638ff472f, 'name': SearchDatastore_Task, 'duration_secs': 0.0099} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.705773] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.706226] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 79d98176-b566-4349-ad10-c2ea6fdbc657/79d98176-b566-4349-ad10-c2ea6fdbc657.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.706685] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.706962] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.707816] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51d4d2d6-c55a-43bf-bf16-89fecb25b56e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.710482] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4011fc33-e87d-4405-9b54-9a95a49d581c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.716994] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 627.716994] env[70020]: value = "task-3617709" [ 627.716994] env[70020]: _type = "Task" [ 627.716994] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.721522] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.722115] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.722905] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de1d5b56-f3b5-42d9-924b-acd93c74709a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.728093] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.730928] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 627.730928] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b7653e-c6bf-bd0c-3702-400015d48e84" [ 627.730928] env[70020]: _type = "Task" [ 627.730928] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.741521] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7653e-c6bf-bd0c-3702-400015d48e84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.910994] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.977s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.916449] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.131s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.921897] env[70020]: INFO nova.compute.claims [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.940497] env[70020]: INFO nova.scheduler.client.report [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Deleted allocations for instance 01773af2-4ce2-4d2a-b334-ab99348000a5 [ 627.964078] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 628.001036] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 628.003119] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.003119] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 628.003119] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.003119] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 628.003347] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 628.003387] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 628.003547] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 628.003738] env[70020]: DEBUG nova.virt.hardware [None 
req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 628.003943] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 628.004140] env[70020]: DEBUG nova.virt.hardware [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 628.005757] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15085454-4126-453e-8feb-1303e0fc5691 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.026758] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0aa899-57d5-4aa2-8a2c-88a0fc17d0f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.033208] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617707, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.056774] env[70020]: INFO nova.compute.manager [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Took 19.87 seconds to build instance. 
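(Editor's note, not part of the log.) The repeated "Waiting for the task ... wait_for_task" and "_poll_task ... progress is N% / completed successfully" entries above are the oslo.vmware session polling an asynchronous vCenter task such as PowerOnVM_Task. The sketch below shows, under stated assumptions, how that pattern is typically driven; the vCenter host, credentials, and the way the VM reference is picked are placeholders and are not taken from this deployment.

```python
# Illustrative sketch only: driving an asynchronous vCenter task through
# oslo.vmware, mirroring the PowerOnVM_Task polling seen in the log above.
# Host, credentials and the VM selection are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',          # placeholder vCenter host
    'administrator@vsphere.local',  # placeholder user
    'secret',                       # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,         # interval behind the periodic "progress is N%" polls
)

# Retrieve VirtualMachine objects via the PropertyCollector (simplified;
# Nova keeps its own moref cache rather than scanning on every call).
vms = session.invoke_api(vim_util, 'get_objects', session.vim,
                         'VirtualMachine', 100, ['name'])
vm_ref = vms.objects[0].obj  # first VM, purely for illustration

# Start the asynchronous power-on and block until vCenter reports success;
# wait_for_task() is what produces the "_poll_task ... progress" and
# "completed successfully" lines in the log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)
```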
[ 628.076022] env[70020]: DEBUG nova.compute.manager [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 628.078363] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc639b5a-9052-4158-852a-903a99a9c20d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.105125] env[70020]: DEBUG nova.network.neutron [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.166252] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "refresh_cache-0cc49db6-1574-4e51-8692-b79ee14bc25d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.166252] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquired lock "refresh_cache-0cc49db6-1574-4e51-8692-b79ee14bc25d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.166252] env[70020]: DEBUG nova.network.neutron [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.184822] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617708, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068691} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.187364] env[70020]: DEBUG nova.network.neutron [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Updated VIF entry in instance network info cache for port 505b9f3d-c597-4acb-8477-fd64b8ea5de1. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.187364] env[70020]: DEBUG nova.network.neutron [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Updating instance_info_cache with network_info: [{"id": "505b9f3d-c597-4acb-8477-fd64b8ea5de1", "address": "fa:16:3e:0d:e7:c0", "network": {"id": "665d0c23-de7f-46c9-b7d3-0e8705495b1b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2043355770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "b6039c5c57424a98bbbc8c0f38e9741f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505b9f3d-c5", "ovs_interfaceid": "505b9f3d-c597-4acb-8477-fd64b8ea5de1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.188275] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.191571] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90818ea-e855-44c6-8fa0-2414ce9ab260 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.219130] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] a0b4a0b0-748d-46eb-9e39-3f21e394c090/a0b4a0b0-748d-46eb-9e39-3f21e394c090.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.219873] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2095b56f-cc44-40d5-8784-00f121087087 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.249870] 
env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7653e-c6bf-bd0c-3702-400015d48e84, 'name': SearchDatastore_Task, 'duration_secs': 0.016563} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.252393] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484776} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.257196] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 628.257196] env[70020]: value = "task-3617710" [ 628.257196] env[70020]: _type = "Task" [ 628.257196] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.257196] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a97a162-cc06-48df-9874-e97587c06be7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.257196] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 79d98176-b566-4349-ad10-c2ea6fdbc657/79d98176-b566-4349-ad10-c2ea6fdbc657.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.257196] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.257196] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e015b7f-1cf7-4ab5-a375-160f2c9b94cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.264181] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 628.264181] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5269623a-7b0a-90d0-ca9c-bfa35a223f46" [ 628.264181] env[70020]: _type = "Task" [ 628.264181] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.271142] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617710, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.271882] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 628.271882] env[70020]: value = "task-3617711" [ 628.271882] env[70020]: _type = "Task" [ 628.271882] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.281072] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5269623a-7b0a-90d0-ca9c-bfa35a223f46, 'name': SearchDatastore_Task, 'duration_secs': 0.010518} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.281812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.282131] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.282386] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88deb3e3-15ee-4e28-b0c8-79d639e37b46 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.287452] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617711, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.292488] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 628.292488] env[70020]: value = "task-3617712" [ 628.292488] env[70020]: _type = "Task" [ 628.292488] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.306359] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.326792] env[70020]: DEBUG nova.compute.manager [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Received event network-vif-plugged-16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.327040] env[70020]: DEBUG oslo_concurrency.lockutils [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] Acquiring lock "79d98176-b566-4349-ad10-c2ea6fdbc657-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.327229] env[70020]: DEBUG oslo_concurrency.lockutils [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.327394] env[70020]: DEBUG oslo_concurrency.lockutils [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.327559] env[70020]: DEBUG nova.compute.manager [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] No waiting events found dispatching network-vif-plugged-16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 628.327719] env[70020]: WARNING nova.compute.manager [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Received unexpected event network-vif-plugged-16a8d745-ea55-4e94-9513-0b5547738678 for instance with vm_state building and task_state spawning. [ 628.327872] env[70020]: DEBUG nova.compute.manager [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Received event network-changed-16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.328041] env[70020]: DEBUG nova.compute.manager [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Refreshing instance network info cache due to event network-changed-16a8d745-ea55-4e94-9513-0b5547738678. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 628.328233] env[70020]: DEBUG oslo_concurrency.lockutils [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] Acquiring lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.328389] env[70020]: DEBUG oslo_concurrency.lockutils [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] Acquired lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.328553] env[70020]: DEBUG nova.network.neutron [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Refreshing network info cache for port 16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.449510] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bfa7f5ef-38db-4e9f-ae38-739bce9c49a2 tempest-ServerDiagnosticsNegativeTest-1667465715 tempest-ServerDiagnosticsNegativeTest-1667465715-project-member] Lock "01773af2-4ce2-4d2a-b334-ab99348000a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.616s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.524759] env[70020]: DEBUG oslo_vmware.api [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617707, 'name': PowerOnVM_Task, 'duration_secs': 0.6691} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.524953] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 628.525844] env[70020]: INFO nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Took 10.17 seconds to spawn the instance on the hypervisor. 
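(Editor's note, not part of the log.) The "Acquiring lock ... by ...", "acquired ... waited 0.000s" and "released ... held N.NNNs" lines scattered through this section come from oslo.concurrency's synchronized wrapper around per-instance critical sections. A minimal sketch of that pattern follows; the lock name is one of the instance UUIDs from the log, but the decorated function body is a stand-in, not Nova's actual teardown code.

```python
# Illustrative sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... waited / released ... held" log lines.
import time

from oslo_concurrency import lockutils

INSTANCE_UUID = '0cc49db6-1574-4e51-8692-b79ee14bc25d'  # lock name seen in the log above

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # While this runs, any other caller synchronized on the same lock name
    # blocks; the time it spends blocked is the "waited N.NNNs" figure,
    # and the time spent inside is the "held N.NNNs" figure.
    time.sleep(0.1)  # stand-in for the real per-instance work

do_terminate_instance()
```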
[ 628.526172] env[70020]: DEBUG nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 628.527172] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7413d19f-3e88-4437-acb0-d57e43ce567b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.560465] env[70020]: DEBUG oslo_concurrency.lockutils [None req-06697e92-9b64-4c03-ae3b-e0e4e9224621 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.381s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.596499] env[70020]: INFO nova.compute.manager [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] instance snapshotting [ 628.596725] env[70020]: WARNING nova.compute.manager [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 628.602142] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5527b556-c2dd-4ec7-bdc9-a2bf878ce65d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.608363] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.608850] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Instance network_info: |[{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 628.609767] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:1c:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e709a63-45c3-48e8-8762-26e149c61266', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.624700] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Creating folder: Project (add37b0346e74e7f9724e69253e2cffc). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.625441] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b957a1f-5658-4cad-b888-639e52064a7e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.646637] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7a0842-fa13-4763-8b0a-9cff0dcbbdd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.658333] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Created folder: Project (add37b0346e74e7f9724e69253e2cffc) in parent group-v721521. [ 628.658593] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Creating folder: Instances. Parent ref: group-v721556. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.659121] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34f5be5c-fd73-4de6-8076-88a169f4657c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.671757] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Created folder: Instances in parent group-v721556. [ 628.672028] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 628.672272] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 628.672862] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f577536-cf39-4463-bd43-5db7b9356d65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.693411] env[70020]: DEBUG oslo_concurrency.lockutils [req-19fa77f6-96d6-48b4-be3c-dcb97d32b89c req-895976b5-1984-44c6-b219-d089765b56fc service nova] Releasing lock "refresh_cache-a0b4a0b0-748d-46eb-9e39-3f21e394c090" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.694845] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.694845] env[70020]: value = "task-3617715" [ 628.694845] env[70020]: _type = "Task" [ 628.694845] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.702967] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617715, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.710911] env[70020]: DEBUG nova.network.neutron [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.768364] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617710, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.782204] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.359596} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.783118] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.783523] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6525fffb-db28-44ac-b5e4-f1e2ae78d2e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.810048] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 79d98176-b566-4349-ad10-c2ea6fdbc657/79d98176-b566-4349-ad10-c2ea6fdbc657.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.813575] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b57c4865-392d-4a12-9f2a-c9aa0d4b9176 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.834882] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617712, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.836680] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 628.836680] env[70020]: value = "task-3617716" [ 628.836680] env[70020]: _type = "Task" [ 628.836680] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.847072] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617716, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.856923] env[70020]: DEBUG nova.network.neutron [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.057659] env[70020]: INFO nova.compute.manager [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Took 19.32 seconds to build instance. 
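(Editor's note, not part of the log.) The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above reflect the image-cache spawn pattern: the cached sparse VMDK under devstack-image-cache_base is copied into the instance directory, then the copy is grown to the flavor's root disk size (1048576 KB here). The sketch below approximates that flow under stated assumptions; the session setup and datacenter lookup are placeholders, and the SOAP keyword arguments follow the vSphere VirtualDiskManager API as commonly invoked, so treat them as an approximation rather than Nova's exact helper code.

```python
# Illustrative sketch of the copy-then-extend root disk flow seen in the log.
from oslo_vmware import api, vim_util
from oslo_vmware.objects import datastore as ds_obj

session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',  # placeholders
                               api_retry_count=10, task_poll_interval=0.5)

image_id = 'c9cd83bf-fd12-4173-a067-f57d38f23556'       # cached image id from the log
instance_uuid = '79d98176-b566-4349-ad10-c2ea6fdbc657'  # target instance from the log
cached = ds_obj.DatastorePath('datastore2', 'devstack-image-cache_base',
                              image_id, '%s.vmdk' % image_id)
dest = ds_obj.DatastorePath('datastore2', instance_uuid, '%s.vmdk' % instance_uuid)

# Pick a datacenter reference purely for illustration.
dcs = session.invoke_api(vim_util, 'get_objects', session.vim, 'Datacenter', 10, ['name'])
dc_ref = dcs.objects[0].obj
disk_mgr = session.vim.service_content.virtualDiskManager

# 1. Copy the sparse cached image into the instance directory.
copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                               sourceName=str(cached), sourceDatacenter=dc_ref,
                               destName=str(dest), destDatacenter=dc_ref)
session.wait_for_task(copy_task)

# 2. Grow the copied root disk to the flavor's root_gb
#    (1 GiB -> 1048576 KB, matching "Extending root virtual disk to 1048576").
extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                 name=str(dest), datacenter=dc_ref,
                                 newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(extend_task)
```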
[ 629.063827] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 629.165180] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 629.165180] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3f7134ed-6aa5-4bf3-b3d4-0b55ac1ef64b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.172025] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 629.172025] env[70020]: value = "task-3617717" [ 629.172025] env[70020]: _type = "Task" [ 629.172025] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.182419] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617717, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.206448] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617715, 'name': CreateVM_Task, 'duration_secs': 0.432868} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.206824] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 629.207757] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.209081] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.209081] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 629.209081] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1d2627d-50d8-4409-b01c-e37ff78caaa6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.216342] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 629.216342] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52262b1f-796e-a6a4-9280-fb775e021e19" [ 629.216342] env[70020]: _type = "Task" [ 629.216342] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.231421] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52262b1f-796e-a6a4-9280-fb775e021e19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.266871] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617710, 'name': ReconfigVM_Task, 'duration_secs': 0.929327} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.267788] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Reconfigured VM instance instance-0000000a to attach disk [datastore2] a0b4a0b0-748d-46eb-9e39-3f21e394c090/a0b4a0b0-748d-46eb-9e39-3f21e394c090.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.268974] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e63461-cd14-43e4-b21b-39e43018e41e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.271415] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07c398b5-3667-4073-b504-3de051e07613 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.277211] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3d829f-7afd-4060-baad-1b6895c0ed94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.281976] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 629.281976] env[70020]: value = "task-3617718" [ 629.281976] env[70020]: _type = "Task" [ 629.281976] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.322092] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b731698c-9fed-478b-95a0-73041b6dfeb1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.328849] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617718, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.333930] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552227} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.336616] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.336616] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.336839] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d6ff0cb-5456-4c93-9545-ae48b9bbfde8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.340113] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848878bb-a5a5-4adc-a92b-f82b070d9b2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.352183] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617716, 'name': ReconfigVM_Task, 'duration_secs': 0.280658} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.361629] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 79d98176-b566-4349-ad10-c2ea6fdbc657/79d98176-b566-4349-ad10-c2ea6fdbc657.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.362551] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Releasing lock "refresh_cache-0cc49db6-1574-4e51-8692-b79ee14bc25d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.363126] env[70020]: DEBUG nova.compute.manager [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 629.363559] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 629.363737] env[70020]: DEBUG nova.compute.provider_tree [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.369022] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 629.369022] env[70020]: value = "task-3617719" [ 629.369022] env[70020]: _type = "Task" [ 629.369022] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.369022] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aeb62092-176b-4cc1-b673-7243bfece2be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.369022] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5a911d-0822-434c-89ad-ff0e9eeb64a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.381689] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.384765] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 629.384948] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 629.384948] env[70020]: value = "task-3617720" [ 629.384948] env[70020]: _type = "Task" [ 629.384948] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.385143] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1de6bbd8-e6be-4968-b425-9e688d243677 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.395114] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617720, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.396828] env[70020]: DEBUG oslo_vmware.api [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 629.396828] env[70020]: value = "task-3617721" [ 629.396828] env[70020]: _type = "Task" [ 629.396828] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.405263] env[70020]: DEBUG oslo_vmware.api [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617721, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.523180] env[70020]: DEBUG nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Received event network-changed-c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 629.524340] env[70020]: DEBUG nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Refreshing instance network info cache due to event network-changed-c7c80204-b8b4-46c6-8d93-38d4879119d3. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 629.524761] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Acquiring lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.525069] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Acquired lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.525427] env[70020]: DEBUG nova.network.neutron [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Refreshing network info cache for port c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.560206] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bee8f6f6-4e15-42b0-9089-2e87fc2ffdba tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.832s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.601808] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.618797] env[70020]: DEBUG nova.network.neutron [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Updated VIF entry in instance network info cache for port 16a8d745-ea55-4e94-9513-0b5547738678. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 629.619230] env[70020]: DEBUG nova.network.neutron [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Updating instance_info_cache with network_info: [{"id": "16a8d745-ea55-4e94-9513-0b5547738678", "address": "fa:16:3e:fe:cb:ea", "network": {"id": "53830a9c-7c1f-4b18-ad07-2379bc00d366", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1462386900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f57b434a8be4f14923fe65d0ed24a72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16a8d745-ea", "ovs_interfaceid": "16a8d745-ea55-4e94-9513-0b5547738678", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.683158] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617717, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.733241] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52262b1f-796e-a6a4-9280-fb775e021e19, 'name': SearchDatastore_Task, 'duration_secs': 0.015587} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.733635] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.734064] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 629.734405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.734576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.734757] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.735060] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af6ec419-e0d8-4196-90a2-00cf1d5abdbb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.743978] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.744205] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 629.744932] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c25891d-fdd2-4ed3-8b17-2b81b7c11049 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.750985] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 629.750985] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5220a8ba-2889-2a03-0868-d62c90aee0bf" [ 629.750985] env[70020]: _type = "Task" [ 629.750985] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.759720] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5220a8ba-2889-2a03-0868-d62c90aee0bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.794547] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617718, 'name': Rename_Task, 'duration_secs': 0.280541} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.794861] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 629.795150] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-698739d5-0a21-48bd-9cf4-59b99a8a2431 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.801551] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 629.801551] env[70020]: value = "task-3617722" [ 629.801551] env[70020]: _type = "Task" [ 629.801551] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.810226] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617722, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.846071] env[70020]: DEBUG nova.network.neutron [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Successfully updated port: 6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.883712] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140942} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.883712] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 629.883712] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c35823a-ea85-4e77-ab72-4b57425e6bb2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.907007] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 629.907007] env[70020]: ERROR nova.scheduler.client.report [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [req-b923cc4c-7dbd-4e73-815f-1f5b7a16be6d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b923cc4c-7dbd-4e73-815f-1f5b7a16be6d"}]} [ 629.920570] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44ade259-4dac-4dcf-afa6-dfeb3528dc0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.947031] env[70020]: DEBUG oslo_vmware.api [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617721, 'name': PowerOffVM_Task, 'duration_secs': 0.138293} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.947616] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617720, 'name': Rename_Task, 'duration_secs': 0.15387} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.950772] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 629.950961] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 629.951438] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 629.951516] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 629.951516] env[70020]: value = "task-3617723" [ 629.951516] env[70020]: _type = "Task" [ 629.951516] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.951715] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d94574c-25eb-4b2f-b2ff-57bf657a2a08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.953400] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9e11cd8-c732-4231-b579-27d260abb06a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.960883] env[70020]: DEBUG nova.scheduler.client.report [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 629.968313] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617723, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.968767] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 629.968767] env[70020]: value = "task-3617725" [ 629.968767] env[70020]: _type = "Task" [ 629.968767] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.977171] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617725, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.991382] env[70020]: DEBUG nova.scheduler.client.report [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 629.991382] env[70020]: DEBUG nova.compute.provider_tree [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.018144] env[70020]: DEBUG nova.scheduler.client.report [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 630.020558] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 630.020946] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 630.021229] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleting the datastore file [datastore2] 0cc49db6-1574-4e51-8692-b79ee14bc25d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.021820] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a741378c-4c8b-4321-a6c7-b8c95acd885b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.031101] env[70020]: DEBUG oslo_vmware.api [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a 
tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for the task: (returnval){ [ 630.031101] env[70020]: value = "task-3617726" [ 630.031101] env[70020]: _type = "Task" [ 630.031101] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.040727] env[70020]: DEBUG oslo_vmware.api [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617726, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.053446] env[70020]: DEBUG nova.scheduler.client.report [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 630.067444] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 630.127886] env[70020]: DEBUG oslo_concurrency.lockutils [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] Releasing lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.128166] env[70020]: DEBUG nova.compute.manager [req-5014693a-210e-4ec7-8f11-3198162276af req-354a0d7a-3e85-4ae7-ba0e-32bd25bcbba2 service nova] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Received event network-vif-deleted-154faccc-5d99-43cc-a66e-9c06bcc5fdf2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.185257] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617717, 'name': CreateSnapshot_Task, 'duration_secs': 1.006508} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.185609] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 630.186498] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18c64f0-2260-46be-a6d3-234370a9966b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.264781] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5220a8ba-2889-2a03-0868-d62c90aee0bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.269076] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7bec7a6-3d3f-4e35-acbb-7b3fdb0903c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.277020] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 630.277020] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d31b97-3b87-e04f-f664-9478a458ed73" [ 630.277020] env[70020]: _type = "Task" [ 630.277020] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.289799] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d31b97-3b87-e04f-f664-9478a458ed73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.316046] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617722, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.350023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "refresh_cache-ea97f6ab-057e-44d3-835a-68b46d241621" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.350023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "refresh_cache-ea97f6ab-057e-44d3-835a-68b46d241621" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.350023] env[70020]: DEBUG nova.network.neutron [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.425867] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d54277-c228-43dd-a1a1-b2c539c485b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.435577] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d281146c-7232-4950-b11b-e65c7cd860db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.481049] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47ade98-45a5-42b4-8613-1bef81e66baa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.498139] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617723, 'name': ReconfigVM_Task, 'duration_secs': 0.399631} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.504925] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Reconfigured VM instance instance-00000007 to attach disk [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 630.505622] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617725, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.505763] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27181e51-bab4-4c62-b1a1-12dc43b50e6a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.508963] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d990ff-3819-44ad-991a-6fc8732f4adf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.527302] env[70020]: DEBUG nova.compute.provider_tree [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.531135] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 630.531135] env[70020]: value = "task-3617727" [ 630.531135] env[70020]: _type = "Task" [ 630.531135] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.549286] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617727, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.549815] env[70020]: DEBUG oslo_vmware.api [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Task: {'id': task-3617726, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153051} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.549926] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 630.550076] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 630.550287] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 630.550655] env[70020]: INFO nova.compute.manager [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Took 1.19 seconds to destroy the instance on the hypervisor. [ 630.550871] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 630.551110] env[70020]: DEBUG nova.compute.manager [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 630.551210] env[70020]: DEBUG nova.network.neutron [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 630.587130] env[70020]: DEBUG nova.network.neutron [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.606133] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.716151] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 630.716515] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f73a79f9-4d3d-4bb9-a5bc-4ebff838cb4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.748383] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 630.748383] env[70020]: value = "task-3617728" [ 630.748383] env[70020]: _type = "Task" [ 630.748383] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.760038] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617728, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.789977] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d31b97-3b87-e04f-f664-9478a458ed73, 'name': SearchDatastore_Task, 'duration_secs': 0.015654} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.790277] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.790585] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d601179a-df77-4f2e-b8df-9185b8a485e3/d601179a-df77-4f2e-b8df-9185b8a485e3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.790892] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e564a064-a5e2-4968-a6b7-c080151bcc80 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.805050] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 630.805050] env[70020]: value = "task-3617729" [ 630.805050] env[70020]: _type = "Task" [ 630.805050] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.825609] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617729, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.825838] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617722, 'name': PowerOnVM_Task} progress is 82%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.930643] env[70020]: DEBUG nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Received event network-changed-27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.930740] env[70020]: DEBUG nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Refreshing instance network info cache due to event network-changed-27c6992d-5e25-418c-83e7-a49ce44dee0e. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 630.930944] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Acquiring lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.931292] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Acquired lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.931292] env[70020]: DEBUG nova.network.neutron [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Refreshing network info cache for port 27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.940125] env[70020]: DEBUG nova.network.neutron [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.987946] env[70020]: DEBUG oslo_vmware.api [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617725, 'name': PowerOnVM_Task, 'duration_secs': 0.569726} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.987946] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 630.988297] env[70020]: INFO nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Took 7.92 seconds to spawn the instance on the hypervisor. 
[ 630.988297] env[70020]: DEBUG nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 630.989133] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e667ccf5-cbb1-40e4-81dd-78bcbcbe1341 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.033164] env[70020]: DEBUG nova.scheduler.client.report [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 631.063373] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617727, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.094124] env[70020]: DEBUG nova.network.neutron [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.163530] env[70020]: DEBUG nova.network.neutron [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Updated VIF entry in instance network info cache for port c7c80204-b8b4-46c6-8d93-38d4879119d3. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 631.164259] env[70020]: DEBUG nova.network.neutron [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Updating instance_info_cache with network_info: [{"id": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "address": "fa:16:3e:20:f4:6e", "network": {"id": "e0413f92-dcf6-413d-b61f-14b064f9a1d8", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-493233975-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3454ca4e376d4017891025c3a36cebf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7c80204-b8", "ovs_interfaceid": "c7c80204-b8b4-46c6-8d93-38d4879119d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.260935] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617728, 'name': CloneVM_Task} progress is 93%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.319939] env[70020]: DEBUG nova.network.neutron [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Updating instance_info_cache with network_info: [{"id": "6348da2f-b0bd-499f-bf5e-b14a38d29438", "address": "fa:16:3e:aa:ce:b2", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348da2f-b0", "ovs_interfaceid": "6348da2f-b0bd-499f-bf5e-b14a38d29438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.326629] env[70020]: DEBUG oslo_vmware.api [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617722, 'name': PowerOnVM_Task, 'duration_secs': 1.239901} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.326916] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 631.327314] env[70020]: INFO nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Took 10.62 seconds to spawn the instance on the hypervisor. 
[ 631.327683] env[70020]: DEBUG nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 631.329601] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983c4acb-f847-43f1-987b-b3c20796c804 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.338689] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617729, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.528284] env[70020]: INFO nova.compute.manager [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Took 15.82 seconds to build instance. [ 631.554620] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.638s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.555219] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 631.558879] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617727, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.559379] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.619s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.564587] env[70020]: INFO nova.compute.claims [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.599630] env[70020]: INFO nova.compute.manager [-] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Took 1.05 seconds to deallocate network for instance. 
[ 631.670763] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Releasing lock "refresh_cache-6a114dce-7ed3-46e1-9d50-c3dd6efd340c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.670763] env[70020]: DEBUG nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-vif-plugged-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.670884] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Acquiring lock "d601179a-df77-4f2e-b8df-9185b8a485e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.672252] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.672252] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.672252] env[70020]: DEBUG nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] No waiting events found dispatching network-vif-plugged-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 631.672252] env[70020]: WARNING nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received unexpected event network-vif-plugged-4e709a63-45c3-48e8-8762-26e149c61266 for instance with vm_state building and task_state spawning. [ 631.672252] env[70020]: DEBUG nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-changed-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.672472] env[70020]: DEBUG nova.compute.manager [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing instance network info cache due to event network-changed-4e709a63-45c3-48e8-8762-26e149c61266. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 631.672472] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Acquiring lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.672472] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Acquired lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.672472] env[70020]: DEBUG nova.network.neutron [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 631.764199] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617728, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.829219] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "refresh_cache-ea97f6ab-057e-44d3-835a-68b46d241621" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.829574] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance network_info: |[{"id": "6348da2f-b0bd-499f-bf5e-b14a38d29438", "address": "fa:16:3e:aa:ce:b2", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348da2f-b0", "ovs_interfaceid": "6348da2f-b0bd-499f-bf5e-b14a38d29438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.830586] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 
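Editor's note: the req-82926c64 records walk the external-event path: Neutron reports network-vif-plugged, the manager takes the per-instance "-events" lock, finds no registered waiter (the instance is still spawning), logs the "Received unexpected event" warning, and then refreshes the network info cache on the follow-up network-changed event. The sketch below shows only the pop-or-warn semantics; the class and method names are illustrative stand-ins, not Nova's actual structures.

    import threading

    class InstanceEvents:
        """Track which external events an instance is currently waiting for."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}           # (instance_uuid, event_name) -> threading.Event

        def prepare_for(self, instance_uuid, event_name):
            """Called by the spawning thread before it blocks on an event."""
            with self._lock:
                waiter = threading.Event()
                self._waiters[(instance_uuid, event_name)] = waiter
                return waiter

        def pop_event(self, instance_uuid, event_name):
            with self._lock:             # the "-events" lock in the log
                return self._waiters.pop((instance_uuid, event_name), None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop_event(instance_uuid, event_name)
        if waiter is None:
            # mirrors the WARNING: nobody is blocked on this event yet
            print(f"Received unexpected event {event_name} for instance {instance_uuid}")
        else:
            waiter.set()                 # unblock the thread waiting in spawn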
tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578488} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.836444] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:ce:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6348da2f-b0bd-499f-bf5e-b14a38d29438', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.846202] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating folder: Project (b3a2dc07c1d447ea81ca142d80ab4210). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.846569] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d601179a-df77-4f2e-b8df-9185b8a485e3/d601179a-df77-4f2e-b8df-9185b8a485e3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 631.846827] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 631.847516] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c78bfb5-f81c-49e6-b19f-822d8a52adc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.849776] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-312f533b-a0ee-4cfd-88fc-8894d301b32b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.859820] env[70020]: INFO nova.compute.manager [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Took 17.71 seconds to build instance. [ 631.864082] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 631.864082] env[70020]: value = "task-3617731" [ 631.864082] env[70020]: _type = "Task" [ 631.864082] env[70020]: } to complete. 
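Editor's note: the FloatingIPs records above show the spawn path reusing the datastore image cache: the cached c9cd83bf vmdk is copied into the instance directory (CopyVirtualDisk_Task) and the root disk is then extended to the flavor size, here 1048576 KiB, i.e. 1 GiB. A rough sketch of that ordering follows; the two vCenter calls are stubbed out as hypothetical callables, not real driver APIs.

    def provision_root_disk(copy_virtual_disk, extend_virtual_disk,
                            datastore, image_id, instance_uuid, root_gb):
        """Copy the cached base vmdk, then grow it to the flavor's root size.

        copy_virtual_disk(src, dst) and extend_virtual_disk(path, size_kb) stand
        in for the CopyVirtualDisk_Task / ExtendVirtualDisk_Task calls in the log.
        """
        cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        copy_virtual_disk(cached, target)            # task-3617729 in the log
        requested_kb = root_gb * 1024 * 1024         # flavor root_gb -> KiB
        extend_virtual_disk(target, requested_kb)    # task-3617731 in the log
        return target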
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.870586] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created folder: Project (b3a2dc07c1d447ea81ca142d80ab4210) in parent group-v721521. [ 631.870872] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating folder: Instances. Parent ref: group-v721561. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.871177] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-176f1d27-effe-440d-b5e1-509213ffb2e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.878255] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.886435] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created folder: Instances in parent group-v721561. [ 631.886732] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.886933] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.887162] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a678ca0-5c19-4fd8-b008-2dddba937db1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.913299] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.913299] env[70020]: value = "task-3617733" [ 631.913299] env[70020]: _type = "Task" [ 631.913299] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.923634] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617733, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.030531] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29b4d9e1-169a-4b47-94a5-9dc4f659ee82 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.342s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.056079] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617727, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.069397] env[70020]: DEBUG nova.compute.utils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 632.074283] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 632.074283] env[70020]: DEBUG nova.network.neutron [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 632.115495] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.260606] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "372e5569-8824-4841-b3d6-4b07423c7b3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.261268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.261268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 
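Editor's note: the "Using /dev/sd instead of None" line from get_next_device_name above is the fallback taken when a block device mapping arrives without a device prefix: the code assumes /dev/sd and picks the next free letter. A tiny illustrative version is below; the helper name and inputs are assumptions and the real Nova routine handles more cases (existing mappings, per-hypervisor prefixes).

    import string

    def get_next_device_name(existing, prefix="/dev/sd"):
        """Return the first /dev/sdX name not already used by the instance.

        existing is an iterable of device paths such as ["/dev/sda", "/dev/sdb"].
        """
        used = {name[len(prefix):] for name in existing if name.startswith(prefix)}
        for letter in string.ascii_lowercase:
            if letter not in used:
                return prefix + letter
        raise ValueError("no free device names left")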
tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "372e5569-8824-4841-b3d6-4b07423c7b3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.261268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.261744] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.263015] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617728, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.264597] env[70020]: DEBUG nova.policy [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0e22e21d3684201883adc3617ddee72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3a2dc07c1d447ea81ca142d80ab4210', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 632.266312] env[70020]: INFO nova.compute.manager [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Terminating instance [ 632.303893] env[70020]: DEBUG nova.network.neutron [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updated VIF entry in instance network info cache for port 27c6992d-5e25-418c-83e7-a49ce44dee0e. 
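Editor's note: the "Policy check for network:attach_external_network failed" line above is allocate_for_instance asking oslo.policy whether the caller (roles reader/member, not admin) may attach to an external network; since the check fails, the allocation proceeds with ordinary tenant networks. A minimal oslo.policy-style check is sketched below. The rule string and credentials are taken from the log, but the enforcer wiring and the role:admin default are simplifications, not Nova's registered policy defaults.

    from oslo_config import cfg
    from oslo_policy import policy

    # Simplified stand-in for Nova's policy wiring: only the rule named in the
    # log is registered, and it is restricted to admins (an assumption here).
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["reader", "member"],
             "project_id": "b3a2dc07c1d447ea81ca142d80ab4210"}
    allowed = enforcer.enforce("network:attach_external_network", {}, creds)
    print(allowed)   # False for a plain member, matching the DEBUG line above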
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 632.303893] env[70020]: DEBUG nova.network.neutron [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updating instance_info_cache with network_info: [{"id": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "address": "fa:16:3e:d7:26:c6", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c6992d-5e", "ovs_interfaceid": "27c6992d-5e25-418c-83e7-a49ce44dee0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.362093] env[70020]: DEBUG oslo_concurrency.lockutils [None req-453ac8b1-c898-4f3e-919c-2a1a597f2e6d tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.230s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.374586] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173132} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.375029] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 632.375950] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d6c839-396d-4ed3-b6ab-ef4ad1890ddd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.404344] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] d601179a-df77-4f2e-b8df-9185b8a485e3/d601179a-df77-4f2e-b8df-9185b8a485e3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.405521] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acffff44-1742-4648-9fb3-34a230f53dfc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.431017] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617733, 'name': CreateVM_Task, 'duration_secs': 0.479516} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.432776] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.433160] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 632.433160] env[70020]: value = "task-3617734" [ 632.433160] env[70020]: _type = "Task" [ 632.433160] env[70020]: } to complete. 
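Editor's note: once the root disk is extended, the log shows the driver "Reconfiguring VM instance instance-0000000c to attach disk ... with type sparse" via VirtualMachine.ReconfigVM_Task. The sketch below only outlines the shape of such a reconfigure request; the real driver builds SOAP VirtualDeviceConfigSpec objects through the vSphere API, whereas plain dicts are used here purely to label the pieces the log refers to (the vmdk path, the disk type, the add-device operation).

    def build_disk_attach_spec(vmdk_path, controller_key=0, unit_number=0,
                               disk_type="sparse"):
        """Illustrative shape of the reconfigure request behind ReconfigVM_Task."""
        return {
            "deviceChange": [{
                "operation": "add",
                "device": {
                    "backing": {"fileName": vmdk_path,
                                "diskMode": "persistent",
                                "thinProvisioned": disk_type == "thin"},
                    "controllerKey": controller_key,
                    "unitNumber": unit_number,
                },
            }]
        }

    spec = build_disk_attach_spec(
        "[datastore2] d601179a-df77-4f2e-b8df-9185b8a485e3/"
        "d601179a-df77-4f2e-b8df-9185b8a485e3.vmdk")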
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.433802] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.433982] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.434316] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.434634] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79298ac2-74c9-4e7e-b6cd-5ef4c635d08f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.443469] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 632.443469] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5262cba2-44b3-21a6-dcdf-c93f8f0f3ec4" [ 632.443469] env[70020]: _type = "Task" [ 632.443469] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.447791] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617734, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.456489] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5262cba2-44b3-21a6-dcdf-c93f8f0f3ec4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.556665] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617727, 'name': Rename_Task, 'duration_secs': 1.540269} completed successfully. 
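Editor's note: the ServersAdminTestJSON request above takes a lock and an external semaphore named after the image-cache path "[datastore2] devstack-image-cache_base/c9cd83bf-..." before running SearchDatastore_Task to see whether the cached vmdk already exists. Naming the lock after the cache path serializes concurrent builds of the same image while leaving different images independent. A small sketch of that serialize-by-path idea follows; the lock name mirrors the log, the fetch/reuse decision is a simplified assumption.

    from oslo_concurrency import lockutils

    def ensure_cached_image(datastore, image_id, exists_in_cache, fetch_image):
        """Fetch an image into the datastore cache at most once per image.

        exists_in_cache(path) and fetch_image(path) stand in for the
        SearchDatastore_Task / download steps in the log.
        """
        cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}"
        with lockutils.lock(cache_path):
            if not exists_in_cache(cache_path):
                fetch_image(cache_path)
        return f"{cache_path}/{image_id}.vmdk"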
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.556965] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 632.557899] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-361599e2-378a-4f70-aaaa-8c2b44dfb5af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.565804] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 632.565804] env[70020]: value = "task-3617735" [ 632.565804] env[70020]: _type = "Task" [ 632.565804] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.576434] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.586053] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 632.759820] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617728, 'name': CloneVM_Task} progress is 95%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.771746] env[70020]: DEBUG nova.compute.manager [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 632.772181] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.774875] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3937389c-e6c5-45cc-8688-72d00f66867e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.783521] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.783865] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9aa728ea-a350-47d2-bed0-f10a7b233165 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.792011] env[70020]: DEBUG oslo_vmware.api [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 632.792011] env[70020]: value = "task-3617736" [ 632.792011] env[70020]: _type = "Task" [ 632.792011] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.804391] env[70020]: DEBUG oslo_vmware.api [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617736, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.809539] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Releasing lock "refresh_cache-1f95bfa8-bc97-4ed7-8c33-c00297430bf5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.809649] env[70020]: DEBUG nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Received event network-vif-plugged-6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.809983] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Acquiring lock "ea97f6ab-057e-44d3-835a-68b46d241621-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.810463] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Lock "ea97f6ab-057e-44d3-835a-68b46d241621-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.810802] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Lock "ea97f6ab-057e-44d3-835a-68b46d241621-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.811446] env[70020]: DEBUG nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] No waiting events found dispatching network-vif-plugged-6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 632.811714] env[70020]: WARNING nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Received unexpected event network-vif-plugged-6348da2f-b0bd-499f-bf5e-b14a38d29438 for instance with vm_state building and task_state spawning. [ 632.812687] env[70020]: DEBUG nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Received event network-changed-6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.812940] env[70020]: DEBUG nova.compute.manager [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Refreshing instance network info cache due to event network-changed-6348da2f-b0bd-499f-bf5e-b14a38d29438. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 632.813240] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Acquiring lock "refresh_cache-ea97f6ab-057e-44d3-835a-68b46d241621" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.813454] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] Acquired lock "refresh_cache-ea97f6ab-057e-44d3-835a-68b46d241621" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.813728] env[70020]: DEBUG nova.network.neutron [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Refreshing network info cache for port 6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 632.886944] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71da5dc4-b3d1-44a2-aa4d-b213cade1492 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.897066] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f106bb-52e3-4d61-8848-d22567e9214e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.943516] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d041c8-fcbb-4b1f-b312-aea56550088d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.960126] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.961822] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b321f3e-65f6-4d74-a9c4-9f6d5b3525f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.971104] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5262cba2-44b3-21a6-dcdf-c93f8f0f3ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.051755} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.971818] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.972156] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.972433] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.972608] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.972914] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.973117] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbfa52be-f248-455c-a40f-058af7035b45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.985834] env[70020]: DEBUG nova.compute.provider_tree [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 632.997159] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.997159] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None 
req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 632.997935] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa24e24a-874f-4710-902a-96214fec2f76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.005887] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 633.005887] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5208dd55-da38-0a80-1dc5-39247ff70512" [ 633.005887] env[70020]: _type = "Task" [ 633.005887] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.019327] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5208dd55-da38-0a80-1dc5-39247ff70512, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.078533] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617735, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.262400] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617728, 'name': CloneVM_Task, 'duration_secs': 2.254901} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.262731] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Created linked-clone VM from snapshot [ 633.263514] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c408ba-295d-443d-a2e3-e28f510d7cda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.273594] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Uploading image 0f059e4b-3d4c-472a-b58c-6cdbc63a03a0 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 633.303945] env[70020]: DEBUG oslo_vmware.api [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617736, 'name': PowerOffVM_Task, 'duration_secs': 0.350649} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.306431] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.306678] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.307165] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ac19587-5b8b-4e3e-8d0e-b77305d215db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.319278] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 633.319278] env[70020]: value = "vm-721560" [ 633.319278] env[70020]: _type = "VirtualMachine" [ 633.319278] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 633.321836] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1447346f-8609-4813-8184-3be460b17f26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.330766] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease: (returnval){ [ 633.330766] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523632b3-f34e-8d28-e8c9-8d6c87b51d2d" [ 633.330766] env[70020]: _type = "HttpNfcLease" [ 633.330766] env[70020]: } obtained for exporting VM: (result){ [ 633.330766] env[70020]: value = "vm-721560" [ 633.330766] env[70020]: _type = "VirtualMachine" [ 633.330766] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 633.333165] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the lease: (returnval){ [ 633.333165] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523632b3-f34e-8d28-e8c9-8d6c87b51d2d" [ 633.333165] env[70020]: _type = "HttpNfcLease" [ 633.333165] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 633.341403] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 633.341403] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523632b3-f34e-8d28-e8c9-8d6c87b51d2d" [ 633.341403] env[70020]: _type = "HttpNfcLease" [ 633.341403] env[70020]: } is initializing. 
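Editor's note: for the ImagesTestJSON snapshot, the log shows a linked-clone VM created from the snapshot (CloneVM_Task), then an HttpNfcLease requested via VirtualMachine.ExportVm so the clone's disk can be streamed out as image 0f059e4b; the lease starts in the "initializing" state and is polled until ready. The outline below shows only that control flow. Every parameter is a stand-in callable, not an oslo.vmware name: create_export_lease() maps to ExportVm, lease_state() to polling the HttpNfcLease, and read_chunks()/upload_chunk() to streaming the exported disk into Glance.

    import time

    def export_vm_to_image(create_export_lease, lease_state, read_chunks,
                           upload_chunk, complete_lease):
        """Outline of the stream-optimized image upload the records above start."""
        lease = create_export_lease()
        while lease_state(lease) == "initializing":   # the _poll_lease DEBUG line
            time.sleep(1)
        if lease_state(lease) != "ready":
            raise RuntimeError("export lease failed")
        try:
            for chunk in read_chunks(lease):
                upload_chunk(chunk)
        finally:
            complete_lease(lease)                     # lets vCenter release the lease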
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 633.360458] env[70020]: DEBUG nova.network.neutron [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updated VIF entry in instance network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 633.360864] env[70020]: DEBUG nova.network.neutron [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.398033] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.398033] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.398033] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Deleting the datastore file [datastore2] 372e5569-8824-4841-b3d6-4b07423c7b3d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.398033] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae2aa68a-7fa3-485f-9ab9-c7a1e4afa070 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.409964] env[70020]: DEBUG oslo_vmware.api [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 
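Editor's note: the terminate path for instance 372e5569 above follows a fixed order: power the VM off (PowerOffVM_Task), unregister it from vCenter, then delete its directory from the datastore (FileManager.DeleteDatastoreFile_Task). A minimal sketch of that ordering with the three vCenter calls stubbed out as hypothetical callables; error handling and retries are omitted.

    def destroy_instance(power_off_vm, unregister_vm, delete_datastore_dir,
                         instance_uuid, datastore="datastore2"):
        """Tear-down order used above: stop the VM, drop its vCenter
        registration, then remove its files from the datastore."""
        power_off_vm(instance_uuid)                            # task-3617736
        unregister_vm(instance_uuid)                           # removes the vCenter object only
        delete_datastore_dir(f"[{datastore}] {instance_uuid}")  # task-3617739 wipes the files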
tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for the task: (returnval){ [ 633.409964] env[70020]: value = "task-3617739" [ 633.409964] env[70020]: _type = "Task" [ 633.409964] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.421679] env[70020]: DEBUG oslo_vmware.api [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617739, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.452178] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.519362] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5208dd55-da38-0a80-1dc5-39247ff70512, 'name': SearchDatastore_Task, 'duration_secs': 0.016339} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.520613] env[70020]: ERROR nova.scheduler.client.report [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [req-a62fe9dd-9658-4dad-b2ef-8e7ad22deada] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a62fe9dd-9658-4dad-b2ef-8e7ad22deada"}]} [ 633.523905] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21e3d7a5-0e3c-4604-99d8-e07898bcc073 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.531283] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 633.531283] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ba5065-6740-f634-fd34-946b443f3b54" [ 633.531283] env[70020]: _type = "Task" [ 633.531283] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.545032] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ba5065-6740-f634-fd34-946b443f3b54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.552452] env[70020]: DEBUG nova.scheduler.client.report [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 633.575655] env[70020]: DEBUG nova.scheduler.client.report [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 633.576470] env[70020]: DEBUG nova.compute.provider_tree [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 633.588428] env[70020]: DEBUG oslo_vmware.api [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617735, 'name': PowerOnVM_Task, 'duration_secs': 0.894858} completed successfully. 
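Editor's note: the placement exchange above is optimistic concurrency in action: the inventory PUT was rejected with 409 placement.concurrent_update ("resource provider generation conflict"), so the report client re-reads the provider's inventories (picking up the fresher DISK_GB max_unit of 76) and retries against the new generation. The loop below sketches that pattern; get_inventory and put_inventory are assumed callables standing in for the placement GET/PUT, not the real report-client methods.

    def update_inventory_with_retry(get_inventory, put_inventory, new_inventory,
                                    max_attempts=3):
        """Retry an inventory update until the provider generation matches.

        get_inventory() is assumed to return (current_inventory, generation) and
        put_inventory(inventory, generation) to return True on success or False
        on a 409 conflict; on conflict we re-read to pick up the new generation
        and try again, which is what "Refreshing inventories for resource
        provider ..." is doing above.
        """
        for _ in range(max_attempts):
            _, generation = get_inventory()
            if put_inventory(new_inventory, generation):
                return True
        return False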
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.588428] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.588428] env[70020]: DEBUG nova.compute.manager [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.588428] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bbfe6b-9a4f-4336-84d1-6a3871c6502d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.598235] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 633.604647] env[70020]: DEBUG nova.scheduler.client.report [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 633.632923] env[70020]: DEBUG nova.scheduler.client.report [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 633.653413] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 633.653653] env[70020]: DEBUG nova.virt.hardware [None 
req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.653816] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 633.653992] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.655240] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 633.655546] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 633.658294] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 633.658294] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 633.658294] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 633.658294] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 633.658294] env[70020]: DEBUG nova.virt.hardware [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 633.659161] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef26188a-d577-4b39-be12-4901408203b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.685922] env[70020]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae88d13-c9f6-4e16-8ba4-9fad383aa1af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.756978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "301b30f6-9909-4fc9-8721-88a314e4edb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.757415] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.847635] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 633.847635] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523632b3-f34e-8d28-e8c9-8d6c87b51d2d" [ 633.847635] env[70020]: _type = "HttpNfcLease" [ 633.847635] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 633.847635] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 633.847635] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523632b3-f34e-8d28-e8c9-8d6c87b51d2d" [ 633.847635] env[70020]: _type = "HttpNfcLease" [ 633.847635] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 633.847635] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034b2a7d-a94e-49ba-8dad-403146ea169f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.864494] env[70020]: DEBUG oslo_concurrency.lockutils [req-82926c64-27b2-4e6f-bc44-9ba19d17b6e0 req-1e3f1eaa-8af4-48fc-95db-6810d66dd832 service nova] Releasing lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.865654] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a2ca6-a3a8-a8fd-2aeb-829b72293655/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 633.870853] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a2ca6-a3a8-a8fd-2aeb-829b72293655/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 633.880637] env[70020]: DEBUG nova.network.neutron [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Successfully created port: 063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 633.971551] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617734, 'name': ReconfigVM_Task, 'duration_secs': 1.217953} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.975019] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Reconfigured VM instance instance-0000000c to attach disk [datastore2] d601179a-df77-4f2e-b8df-9185b8a485e3/d601179a-df77-4f2e-b8df-9185b8a485e3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.975975] env[70020]: DEBUG oslo_vmware.api [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Task: {'id': task-3617739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191728} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.979337] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a11684d-5e0c-4ccb-af38-e23221ae8305 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.981383] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 633.981648] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 633.981981] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.982213] env[70020]: INFO nova.compute.manager [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Took 1.21 seconds to destroy the instance on the hypervisor. [ 633.982749] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.985570] env[70020]: DEBUG nova.compute.manager [-] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 633.985687] env[70020]: DEBUG nova.network.neutron [-] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.995467] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 633.995467] env[70020]: value = "task-3617740" [ 633.995467] env[70020]: _type = "Task" [ 633.995467] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.013609] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5df30a88-746c-4fba-b4ad-090083f95ce5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.013609] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617740, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.049555] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ba5065-6740-f634-fd34-946b443f3b54, 'name': SearchDatastore_Task, 'duration_secs': 0.023444} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.050401] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.051223] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.051713] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8edc15ee-ebb6-4575-80f3-15d06bf19dcc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.067858] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 634.067858] env[70020]: value = "task-3617741" [ 634.067858] env[70020]: _type = "Task" [ 634.067858] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.082438] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617741, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.115111] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.144469] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4096f3d9-042e-4a2a-afe3-522abb04eac8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.155884] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b07b1fe-b84c-4227-a0b8-4cb396769165 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.200212] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83365329-cfbe-4501-a938-6fdb8d727fcd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.209795] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3febfb-271d-4802-bbc2-732ca205c0f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.229237] env[70020]: DEBUG nova.compute.provider_tree [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.263967] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.466909] env[70020]: DEBUG nova.network.neutron [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Updated VIF entry in instance network info cache for port 6348da2f-b0bd-499f-bf5e-b14a38d29438. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 634.467407] env[70020]: DEBUG nova.network.neutron [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Updating instance_info_cache with network_info: [{"id": "6348da2f-b0bd-499f-bf5e-b14a38d29438", "address": "fa:16:3e:aa:ce:b2", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6348da2f-b0", "ovs_interfaceid": "6348da2f-b0bd-499f-bf5e-b14a38d29438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.510889] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617740, 'name': Rename_Task, 'duration_secs': 0.23048} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.511551] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 634.512018] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2616354a-7d51-486b-82ce-f8ece1925826 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.523798] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 634.523798] env[70020]: value = "task-3617742" [ 634.523798] env[70020]: _type = "Task" [ 634.523798] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.539868] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617742, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.582689] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617741, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.656240] env[70020]: DEBUG nova.compute.manager [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Received event network-changed-3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.656240] env[70020]: DEBUG nova.compute.manager [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Refreshing instance network info cache due to event network-changed-3af5d84e-e814-4689-aa70-e63d58041799. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 634.656240] env[70020]: DEBUG oslo_concurrency.lockutils [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] Acquiring lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.656240] env[70020]: DEBUG oslo_concurrency.lockutils [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] Acquired lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.658354] env[70020]: DEBUG nova.network.neutron [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Refreshing network info cache for port 3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.735089] env[70020]: DEBUG nova.scheduler.client.report [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.806185] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.973622] env[70020]: DEBUG oslo_concurrency.lockutils [req-c2ef0e8b-3217-4c39-950d-511acdc6f4b7 req-44540a1c-5c44-479f-99d1-f7a272a04183 
service nova] Releasing lock "refresh_cache-ea97f6ab-057e-44d3-835a-68b46d241621" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.051396] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617742, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.084108] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.803406} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.084390] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.084691] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.084934] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb7a9852-edd3-4858-984c-4c389559b96f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.092098] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 635.092098] env[70020]: value = "task-3617743" [ 635.092098] env[70020]: _type = "Task" [ 635.092098] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.101587] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617743, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.242383] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.683s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.243239] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.246739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.973s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.248822] env[70020]: INFO nova.compute.claims [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.540387] env[70020]: DEBUG oslo_vmware.api [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617742, 'name': PowerOnVM_Task, 'duration_secs': 0.838097} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.540554] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 635.540844] env[70020]: INFO nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Took 10.06 seconds to spawn the instance on the hypervisor. 
[ 635.541058] env[70020]: DEBUG nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 635.541906] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee3fe92-be43-4956-aa31-7fb02c67f2d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.609022] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11181} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.613563] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 635.614446] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4660b935-09db-49ed-96fd-a689795519dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.653252] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 635.653252] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55325f5c-cf03-452b-945f-a69cf32d3f47 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.679567] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 635.679567] env[70020]: value = "task-3617744" [ 635.679567] env[70020]: _type = "Task" [ 635.679567] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.692825] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617744, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.747517] env[70020]: DEBUG nova.network.neutron [-] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.754450] env[70020]: DEBUG nova.compute.utils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.766438] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 635.766438] env[70020]: DEBUG nova.network.neutron [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 635.938103] env[70020]: DEBUG nova.policy [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '585b37191dc04f67b47b69bff0203b42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8444a0cca1194f8e9e190e02a82e9556', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.064337] env[70020]: INFO nova.compute.manager [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Took 18.71 seconds to build instance. [ 636.189963] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617744, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.250665] env[70020]: INFO nova.compute.manager [-] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Took 2.26 seconds to deallocate network for instance. [ 636.266853] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.569558] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a05712-2a83-4478-8108-9aa3f860e593 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.232s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.669634] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a672e93-69e0-490f-a9c9-089681202237 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.678567] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb47173-11aa-49d6-841e-d032dd8df6c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.692563] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617744, 'name': ReconfigVM_Task, 'duration_secs': 0.636122} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.721127] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Reconfigured VM instance instance-0000000d to attach disk [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.722706] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c2a9a4f-a8c9-4343-979d-44e890d2bca3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.725292] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd1c7b4-3ece-426b-93d5-8da06e7e0483 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.738570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adda1dd7-8cac-4ff6-8457-a83392a14886 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.743672] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 636.743672] env[70020]: value = "task-3617745" [ 636.743672] env[70020]: _type = "Task" [ 636.743672] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.756472] env[70020]: DEBUG nova.compute.provider_tree [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 636.759235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.763751] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617745, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.867519] env[70020]: DEBUG nova.network.neutron [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updated VIF entry in instance network info cache for port 3af5d84e-e814-4689-aa70-e63d58041799. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.867519] env[70020]: DEBUG nova.network.neutron [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updating instance_info_cache with network_info: [{"id": "3af5d84e-e814-4689-aa70-e63d58041799", "address": "fa:16:3e:05:20:3a", "network": {"id": "b4313295-7611-4fc1-b8ba-667ae1e29303", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1683239800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85abf8ca8009465c87e931b0e9d0fe96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3af5d84e-e8", "ovs_interfaceid": "3af5d84e-e814-4689-aa70-e63d58041799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.253770] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617745, 'name': Rename_Task, 'duration_secs': 0.224131} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.254378] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 637.254787] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3173fd6f-a8cc-423e-bf7c-4e8c7b134045 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.266633] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 637.266633] env[70020]: value = "task-3617746" [ 637.266633] env[70020]: _type = "Task" [ 637.266633] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.273538] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617746, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.279158] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.305506] env[70020]: DEBUG nova.scheduler.client.report [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 24 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 637.305506] env[70020]: DEBUG nova.compute.provider_tree [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 24 to 25 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 637.305506] env[70020]: DEBUG nova.compute.provider_tree [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 637.315724] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.316107] env[70020]: DEBUG nova.virt.hardware [None 
req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.316107] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.316250] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.316396] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.316539] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.316746] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.316898] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.317871] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.317983] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.318369] env[70020]: DEBUG nova.virt.hardware [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.319278] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b3ec84-02b8-4cc4-9c4f-e957d43b163a {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.328877] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb84b07-1d57-40fd-90b7-25a3158c4f18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.370642] env[70020]: DEBUG oslo_concurrency.lockutils [req-c45791b5-4f7c-489e-b7a9-ceac3a8b84b0 req-3472560f-f6f2-4d4e-b4d0-7d0335a3d34b service nova] Releasing lock "refresh_cache-bb4e4986-af2a-4832-9ec7-777bca863dce" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.477749] env[70020]: DEBUG nova.network.neutron [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Successfully created port: 1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.621535] env[70020]: INFO nova.compute.manager [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Rebuilding instance [ 637.671830] env[70020]: DEBUG nova.compute.manager [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.673599] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd248ca-5e24-4424-99de-9786f7b4f209 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.779419] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617746, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.810483] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.811216] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 637.816658] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.074s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.816658] env[70020]: INFO nova.compute.claims [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.882047] env[70020]: DEBUG nova.network.neutron [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Successfully updated port: 063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.232595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.232595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.277481] env[70020]: DEBUG oslo_vmware.api [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617746, 'name': PowerOnVM_Task, 'duration_secs': 0.781394} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.277812] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.277976] env[70020]: INFO nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Took 10.31 seconds to spawn the instance on the hypervisor. 
[ 638.278112] env[70020]: DEBUG nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.279536] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbac0ec6-1336-4670-9f91-df1a9f3e26e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.322994] env[70020]: DEBUG nova.compute.utils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.328645] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 638.328844] env[70020]: DEBUG nova.network.neutron [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.384742] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "refresh_cache-bc57657e-99e8-46b8-9731-ddd4864a3114" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.384928] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "refresh_cache-bc57657e-99e8-46b8-9731-ddd4864a3114" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.385127] env[70020]: DEBUG nova.network.neutron [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.482140] env[70020]: DEBUG nova.policy [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7316cb970904dd8b5b300ec05fb4166', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7412ed0b196c4d44b03bc93b0aae2954', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 638.696222] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 638.696760] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3365a25-b037-468f-a2af-56d45fa6c4f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.707065] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 638.707065] env[70020]: value = "task-3617747" [ 638.707065] env[70020]: _type = "Task" [ 638.707065] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.719666] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.735082] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 638.790580] env[70020]: DEBUG nova.compute.manager [req-59818f9f-69ea-4ac8-ab77-9f3eb9fb13fb req-e9243061-3cf6-44d2-bad0-f964b421dda4 service nova] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Received event network-vif-deleted-d435ef58-a090-4c51-b69d-6bafa2b6ff27 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.801299] env[70020]: INFO nova.compute.manager [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Took 21.44 seconds to build instance. [ 638.837343] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 639.221239] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617747, 'name': PowerOffVM_Task, 'duration_secs': 0.194665} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.224305] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 639.224470] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 639.225796] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6502ebd-594c-4108-b7fd-623c11a3e4b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.233167] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 639.233369] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e2f62cd-1445-4ca4-8bd2-c1bec22b6ec4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.258182] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.258455] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.258727] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Deleting the datastore file [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.263398] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07bd702b-5ded-4197-9e33-b833e1a7d553 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.269187] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.276559] env[70020]: DEBUG oslo_vmware.api [None 
req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 639.276559] env[70020]: value = "task-3617749" [ 639.276559] env[70020]: _type = "Task" [ 639.276559] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.296031] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.309691] env[70020]: DEBUG oslo_concurrency.lockutils [None req-19f006fc-5c22-4019-ba7c-b83df5a03cea tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "ea97f6ab-057e-44d3-835a-68b46d241621" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.976s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.310652] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32983227-e0c3-49a4-bf43-bbe8ffd1714b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.319213] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5324739f-02b2-47b9-9469-39e52d9568c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.355172] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14735a8c-f7e7-45cb-9972-8adf86340a81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.363504] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd3b14b-8ab5-483a-8099-ec7a06a4e320 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.369916] env[70020]: DEBUG nova.network.neutron [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.385205] env[70020]: DEBUG nova.compute.provider_tree [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.789435] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125215} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.789740] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 639.790175] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 639.791201] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 639.816815] env[70020]: DEBUG nova.compute.manager [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Received event network-changed-16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 639.817141] env[70020]: DEBUG nova.compute.manager [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Refreshing instance network info cache due to event network-changed-16a8d745-ea55-4e94-9513-0b5547738678. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 639.817264] env[70020]: DEBUG oslo_concurrency.lockutils [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] Acquiring lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.817384] env[70020]: DEBUG oslo_concurrency.lockutils [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] Acquired lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.817671] env[70020]: DEBUG nova.network.neutron [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Refreshing network info cache for port 16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.856701] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 639.887021] env[70020]: DEBUG nova.scheduler.client.report [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.893803] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 639.894122] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.894329] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 639.894562] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.894753] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 639.894921] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 639.895148] env[70020]: DEBUG nova.virt.hardware [None 
req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 639.895311] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 639.895498] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 639.895683] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 639.895926] env[70020]: DEBUG nova.virt.hardware [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 639.897340] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bc08bd-49d1-428e-b25e-5acc373f1c36 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.907013] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a0fdb1-f21d-4bf0-9cfe-bef55a724e2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.940654] env[70020]: DEBUG nova.network.neutron [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Successfully created port: 7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.007396] env[70020]: DEBUG nova.network.neutron [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Updating instance_info_cache with network_info: [{"id": "063f2ba9-4e54-4d7e-9771-1defdefc3c00", "address": "fa:16:3e:28:ab:4e", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap063f2ba9-4e", "ovs_interfaceid": "063f2ba9-4e54-4d7e-9771-1defdefc3c00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.402854] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.402971] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 640.408338] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.760s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.408338] env[70020]: DEBUG nova.objects.instance [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lazy-loading 'resources' on Instance uuid 516341a3-2230-4340-a1e0-ff97bb7a608d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 640.511856] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "refresh_cache-bc57657e-99e8-46b8-9731-ddd4864a3114" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.515034] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Instance network_info: |[{"id": "063f2ba9-4e54-4d7e-9771-1defdefc3c00", "address": "fa:16:3e:28:ab:4e", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap063f2ba9-4e", "ovs_interfaceid": "063f2ba9-4e54-4d7e-9771-1defdefc3c00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.515669] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:ab:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '063f2ba9-4e54-4d7e-9771-1defdefc3c00', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.533880] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.534622] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.535576] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60e92a93-15c2-4f11-a6c9-6b4385eabf96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.574150] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.574150] env[70020]: value = "task-3617750" [ 640.574150] env[70020]: _type = "Task" [ 640.574150] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.580164] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.580540] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.580834] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.580997] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.581199] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.583782] env[70020]: INFO nova.compute.manager [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Terminating instance [ 640.588694] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617750, 'name': CreateVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.844594] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.844976] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.845080] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.845198] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.845343] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.845487] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.845697] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.845850] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.846016] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 
tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.846724] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.846927] env[70020]: DEBUG nova.virt.hardware [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.848359] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bd422d-99fa-4bf6-8119-f8dedfac6aab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.859296] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93e424b-dc70-41c9-ad04-af0609c39fa5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.886299] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.894884] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.895249] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.895501] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b28d232-1908-471a-a6f0-ade38041d048 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.909467] env[70020]: DEBUG nova.compute.utils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.911484] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.911702] env[70020]: DEBUG nova.network.neutron [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.918578] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.918578] env[70020]: value = "task-3617751" [ 640.918578] env[70020]: _type = "Task" [ 640.918578] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.928300] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617751, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.089691] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617750, 'name': CreateVM_Task, 'duration_secs': 0.390729} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.090625] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.092107] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.092107] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.092249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.093607] env[70020]: DEBUG nova.compute.manager [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 641.093607] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 641.093607] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78283883-0d05-45ff-a3c5-38f099545ec1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.095877] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42679721-bdf9-4e28-967a-30c81ddef493 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.103301] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 641.103301] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52303dd7-af39-e5f9-90f1-42023a05a5b0" [ 641.103301] env[70020]: _type = "Task" [ 641.103301] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.109831] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 641.114380] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-536b0d9f-5a02-476b-be9b-4c6d41821f69 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.125850] env[70020]: DEBUG oslo_vmware.api [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 641.125850] env[70020]: value = "task-3617752" [ 641.125850] env[70020]: _type = "Task" [ 641.125850] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.126109] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52303dd7-af39-e5f9-90f1-42023a05a5b0, 'name': SearchDatastore_Task, 'duration_secs': 0.012128} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.129674] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.132879] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.132879] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.132879] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.132879] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.141332] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4a99bc9-a9a0-4fc2-9d52-cc96282f3b24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.150733] env[70020]: DEBUG oslo_vmware.api [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.152176] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.152339] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.153153] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592927e2-7378-4304-83c1-07805b735ddb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.159309] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 641.159309] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c5ffbb-e167-e113-1c2f-a1573bcf33aa" [ 641.159309] env[70020]: _type = "Task" [ 641.159309] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.174910] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c5ffbb-e167-e113-1c2f-a1573bcf33aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.312129] env[70020]: DEBUG nova.policy [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c32498a6608a43dab8045aef0b3006e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '051ddf351c534f65be94aef74fb2ff03', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 641.360256] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c9d4fa-8169-4465-8b8e-9abe7c7a79ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.368146] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a34ccbc-d060-4b63-92f3-57d7a05e1575 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.405111] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580e4478-c853-42cc-9a19-8d5e69285763 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.413390] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3c00ca-8f02-439e-a61f-b46d189900f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.417740] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 641.442507] env[70020]: DEBUG nova.compute.provider_tree [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.447903] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617751, 'name': CreateVM_Task, 'duration_secs': 0.430195} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.449537] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.449537] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.449537] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.449537] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.449537] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee73e251-f2c5-4723-a6c0-87f5684c364d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.454471] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 641.454471] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cbff09-9898-c177-510a-4a07ba8cdf2d" [ 641.454471] env[70020]: _type = "Task" [ 641.454471] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.464743] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cbff09-9898-c177-510a-4a07ba8cdf2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.490255] env[70020]: DEBUG nova.network.neutron [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Successfully updated port: 1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 641.644052] env[70020]: DEBUG oslo_vmware.api [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617752, 'name': PowerOffVM_Task, 'duration_secs': 0.22752} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.644052] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 641.644052] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 641.644052] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20b993df-5939-4d0e-a2a5-8321d3e0742b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.672449] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c5ffbb-e167-e113-1c2f-a1573bcf33aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010865} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.672449] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc8b5984-39d1-475d-a623-fe1e2668883b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.679013] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 641.679013] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522c2d19-a6f8-b916-7483-d9561c18fa30" [ 641.679013] env[70020]: _type = "Task" [ 641.679013] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.687556] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522c2d19-a6f8-b916-7483-d9561c18fa30, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.704367] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 641.704367] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 641.704367] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Deleting the datastore file [datastore2] a0b4a0b0-748d-46eb-9e39-3f21e394c090 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 641.704660] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f6f338d-7c4b-42d0-8ba1-83007c500218 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.711565] env[70020]: DEBUG oslo_vmware.api [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for the task: (returnval){ [ 641.711565] env[70020]: value = "task-3617754" [ 641.711565] env[70020]: _type = "Task" [ 641.711565] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.720327] env[70020]: DEBUG oslo_vmware.api [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617754, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.838447] env[70020]: DEBUG nova.network.neutron [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Updated VIF entry in instance network info cache for port 16a8d745-ea55-4e94-9513-0b5547738678. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.838766] env[70020]: DEBUG nova.network.neutron [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Updating instance_info_cache with network_info: [{"id": "16a8d745-ea55-4e94-9513-0b5547738678", "address": "fa:16:3e:fe:cb:ea", "network": {"id": "53830a9c-7c1f-4b18-ad07-2379bc00d366", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1462386900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f57b434a8be4f14923fe65d0ed24a72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16a8d745-ea", "ovs_interfaceid": "16a8d745-ea55-4e94-9513-0b5547738678", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.943689] env[70020]: DEBUG nova.scheduler.client.report [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 641.966905] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cbff09-9898-c177-510a-4a07ba8cdf2d, 'name': SearchDatastore_Task, 'duration_secs': 0.011482} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.967233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.967464] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.967673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.992789] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "refresh_cache-d0756709-f17b-441e-b537-df937cfbde84" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.993069] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquired lock "refresh_cache-d0756709-f17b-441e-b537-df937cfbde84" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.993154] env[70020]: DEBUG nova.network.neutron [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.191793] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522c2d19-a6f8-b916-7483-d9561c18fa30, 'name': SearchDatastore_Task, 'duration_secs': 0.01431} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.192097] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.192929] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] bc57657e-99e8-46b8-9731-ddd4864a3114/bc57657e-99e8-46b8-9731-ddd4864a3114.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.192929] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.192929] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.193156] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38c10b92-5254-4c26-9be8-64fb01d93b0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.196038] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cc400d0-ed72-424e-b70e-a1715933e590 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.204644] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 642.204644] env[70020]: value = "task-3617755" [ 642.204644] env[70020]: _type = "Task" [ 642.204644] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.211109] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.211649] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.212547] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6989693e-f2da-48a8-8c7a-72f783cf149a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.224229] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.228431] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 642.228431] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520d25a8-243f-5a8a-6540-8b086b77a63b" [ 642.228431] env[70020]: _type = "Task" [ 642.228431] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.232878] env[70020]: DEBUG oslo_vmware.api [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Task: {'id': task-3617754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165536} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.236654] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 642.236893] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 642.237617] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 642.238021] env[70020]: INFO nova.compute.manager [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Took 1.14 seconds to destroy the instance on the hypervisor. [ 642.238136] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 642.238345] env[70020]: DEBUG nova.compute.manager [-] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 642.238462] env[70020]: DEBUG nova.network.neutron [-] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 642.246283] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520d25a8-243f-5a8a-6540-8b086b77a63b, 'name': SearchDatastore_Task, 'duration_secs': 0.013672} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.247137] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f65e50b-d794-440d-bce6-abbf65c5cbe9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.252737] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 642.252737] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524b25f3-b91a-684e-8661-6f52c7049154" [ 642.252737] env[70020]: _type = "Task" [ 642.252737] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.260890] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524b25f3-b91a-684e-8661-6f52c7049154, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.341611] env[70020]: DEBUG oslo_concurrency.lockutils [req-b3ecbb85-ac02-412d-8df5-8dfc52e94cf6 req-9471daa1-7ca8-48aa-b7cd-97a4e58416d8 service nova] Releasing lock "refresh_cache-79d98176-b566-4349-ad10-c2ea6fdbc657" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.432641] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 642.449889] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.455651] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.852s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.458101] env[70020]: INFO nova.compute.claims [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.480205] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 642.480351] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.480583] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.480833] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.482380] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.482470] env[70020]: DEBUG 
nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 642.482699] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 642.483884] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 642.484100] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 642.484741] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 642.484741] env[70020]: DEBUG nova.virt.hardware [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 642.485330] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b77412-d8a3-43d7-8743-835695af3136 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.491017] env[70020]: INFO nova.scheduler.client.report [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Deleted allocations for instance 516341a3-2230-4340-a1e0-ff97bb7a608d [ 642.503115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e88297-808f-47b6-a4e9-fd437ff923c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.536238] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a2ca6-a3a8-a8fd-2aeb-829b72293655/disk-0.vmdk. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 642.537604] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62fc7f3-3886-46e7-8ae9-708da8992b08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.543962] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a2ca6-a3a8-a8fd-2aeb-829b72293655/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 642.544131] env[70020]: ERROR oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a2ca6-a3a8-a8fd-2aeb-829b72293655/disk-0.vmdk due to incomplete transfer. [ 642.544715] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9360efe0-954f-4886-893b-7152a5af7770 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.552694] env[70020]: DEBUG oslo_vmware.rw_handles [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a2ca6-a3a8-a8fd-2aeb-829b72293655/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 642.553022] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Uploaded image 0f059e4b-3d4c-472a-b58c-6cdbc63a03a0 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 642.556193] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 642.556770] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4098525a-db0c-421e-80dc-1f795009a62b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.564979] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 642.564979] env[70020]: value = "task-3617756" [ 642.564979] env[70020]: _type = "Task" [ 642.564979] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.575841] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617756, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.716133] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617755, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.760902] env[70020]: DEBUG nova.network.neutron [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.770545] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524b25f3-b91a-684e-8661-6f52c7049154, 'name': SearchDatastore_Task, 'duration_secs': 0.012704} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.771117] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.771392] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.771731] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66fd31f9-7e71-476b-a721-5d2069e76d87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.784043] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 642.784043] env[70020]: value = "task-3617757" [ 642.784043] env[70020]: _type = "Task" [ 642.784043] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.795300] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617757, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.008872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-913786dc-d745-4d54-a251-6dc9c1a60ea9 tempest-DeleteServersAdminTestJSON-409072902 tempest-DeleteServersAdminTestJSON-409072902-project-admin] Lock "516341a3-2230-4340-a1e0-ff97bb7a608d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.368s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.077358] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617756, 'name': Destroy_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.220732] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781752} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.221056] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] bc57657e-99e8-46b8-9731-ddd4864a3114/bc57657e-99e8-46b8-9731-ddd4864a3114.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.221280] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.221662] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f285fd1-c08c-4c5d-9c99-467dc7473c6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.229654] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 643.229654] env[70020]: value = "task-3617758" [ 643.229654] env[70020]: _type = "Task" [ 643.229654] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.240664] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617758, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.255500] env[70020]: DEBUG nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Received event network-vif-plugged-063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.256667] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Acquiring lock "bc57657e-99e8-46b8-9731-ddd4864a3114-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.257686] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.257686] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.257686] env[70020]: DEBUG nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] No waiting events found dispatching network-vif-plugged-063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 643.261649] env[70020]: WARNING nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Received unexpected event network-vif-plugged-063f2ba9-4e54-4d7e-9771-1defdefc3c00 for instance with vm_state building and task_state spawning. [ 643.261649] env[70020]: DEBUG nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Received event network-changed-063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.261649] env[70020]: DEBUG nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Refreshing instance network info cache due to event network-changed-063f2ba9-4e54-4d7e-9771-1defdefc3c00. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 643.261649] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Acquiring lock "refresh_cache-bc57657e-99e8-46b8-9731-ddd4864a3114" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.261649] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Acquired lock "refresh_cache-bc57657e-99e8-46b8-9731-ddd4864a3114" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.261891] env[70020]: DEBUG nova.network.neutron [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Refreshing network info cache for port 063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.297579] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617757, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.544861] env[70020]: DEBUG nova.network.neutron [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Updating instance_info_cache with network_info: [{"id": "1e8309fb-0a16-407f-9be0-abcac22185ff", "address": "fa:16:3e:50:0d:87", "network": {"id": "b9cd5a16-4e28-4b20-983f-20891a232625", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1417869427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8444a0cca1194f8e9e190e02a82e9556", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e8309fb-0a", "ovs_interfaceid": "1e8309fb-0a16-407f-9be0-abcac22185ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.558039] env[70020]: DEBUG nova.network.neutron [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Successfully created port: b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 643.582671] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 
tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617756, 'name': Destroy_Task, 'duration_secs': 0.549799} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.585660] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Destroyed the VM [ 643.586125] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 643.587524] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-37c97659-1782-41b3-b46b-51d148abc3eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.598203] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 643.598203] env[70020]: value = "task-3617759" [ 643.598203] env[70020]: _type = "Task" [ 643.598203] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.613502] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617759, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.750166] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.236283} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.750166] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.751496] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe623771-e295-460b-a38d-b78b66ca98a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.782164] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] bc57657e-99e8-46b8-9731-ddd4864a3114/bc57657e-99e8-46b8-9731-ddd4864a3114.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.785871] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0616b193-4e76-4701-8c58-1fea3f9190d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.813736] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607841} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.816144] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.816144] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.816144] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 643.816144] env[70020]: value = "task-3617760" [ 643.816144] env[70020]: _type = "Task" [ 643.816144] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.818821] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d7be893-fa7d-4be3-846c-26c8016d8a02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.830091] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 643.830091] env[70020]: value = "task-3617761" [ 643.830091] env[70020]: _type = "Task" [ 643.830091] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.836011] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.844806] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617761, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.926055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d44014c-0ed7-4fea-b35f-2c4548f14f12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.935821] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667ab3c0-d9ba-4bff-9cf1-135b600eeac5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.939924] env[70020]: DEBUG nova.network.neutron [-] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.973954] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7565188-d261-4198-b757-0ef3abfcd661 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.984463] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d715be20-0a2b-4139-99cf-f57820e5aa29 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.001725] env[70020]: DEBUG nova.compute.provider_tree [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.048978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Releasing lock "refresh_cache-d0756709-f17b-441e-b537-df937cfbde84" 
{{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.049339] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Instance network_info: |[{"id": "1e8309fb-0a16-407f-9be0-abcac22185ff", "address": "fa:16:3e:50:0d:87", "network": {"id": "b9cd5a16-4e28-4b20-983f-20891a232625", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1417869427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8444a0cca1194f8e9e190e02a82e9556", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e8309fb-0a", "ovs_interfaceid": "1e8309fb-0a16-407f-9be0-abcac22185ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 644.049743] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:0d:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e8309fb-0a16-407f-9be0-abcac22185ff', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.059848] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Creating folder: Project (8444a0cca1194f8e9e190e02a82e9556). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.060162] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86039953-a829-4507-aba2-c7d403865c05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.070341] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Created folder: Project (8444a0cca1194f8e9e190e02a82e9556) in parent group-v721521. [ 644.070664] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Creating folder: Instances. Parent ref: group-v721566. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.071394] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59327465-e991-4bed-b064-e012aa4126c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.079701] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Created folder: Instances in parent group-v721566. [ 644.079954] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 644.080155] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0756709-f17b-441e-b537-df937cfbde84] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 644.080358] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea4630d9-ac92-4467-8665-1822b7424e0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.111778] env[70020]: DEBUG oslo_vmware.api [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617759, 'name': RemoveSnapshot_Task, 'duration_secs': 0.390838} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.113523] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 644.113903] env[70020]: INFO nova.compute.manager [None req-65643575-6a72-4a65-b464-570b892bf6e3 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Took 15.51 seconds to snapshot the instance on the hypervisor. [ 644.116243] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.116243] env[70020]: value = "task-3617764" [ 644.116243] env[70020]: _type = "Task" [ 644.116243] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.125415] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617764, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.334614] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617760, 'name': ReconfigVM_Task, 'duration_secs': 0.305757} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.340361] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Reconfigured VM instance instance-0000000e to attach disk [datastore2] bc57657e-99e8-46b8-9731-ddd4864a3114/bc57657e-99e8-46b8-9731-ddd4864a3114.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 644.341784] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fabaca97-a42a-4e5c-a61c-330dd916577c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.358626] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122513} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.358626] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.358626] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 644.358626] env[70020]: value = "task-3617765" [ 644.358626] env[70020]: _type = "Task" [ 644.358626] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.358626] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7080ece9-bb30-421a-b569-288a79e38419 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.370229] env[70020]: DEBUG nova.network.neutron [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Successfully updated port: 7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 644.396229] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.396795] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617765, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.398505] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.404015] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.404015] env[70020]: DEBUG nova.network.neutron [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.404015] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7076e1c-f575-4286-852a-02c83fa5f6f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.432839] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 644.432839] env[70020]: value = "task-3617766" [ 644.432839] env[70020]: _type = "Task" [ 644.432839] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.448179] env[70020]: INFO nova.compute.manager [-] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Took 2.21 seconds to deallocate network for instance. [ 644.448179] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617766, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.504935] env[70020]: DEBUG nova.scheduler.client.report [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.574753] env[70020]: DEBUG nova.network.neutron [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.646165] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617764, 'name': CreateVM_Task, 'duration_secs': 0.365301} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.646165] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0756709-f17b-441e-b537-df937cfbde84] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.646165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.646165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.646165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.646165] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c47e3228-6bc0-47c9-a02b-3c1f139559a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.651506] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 644.651506] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526d8431-c9dd-9e9a-0e77-92f87a9c7aa9" [ 644.651506] env[70020]: _type = "Task" [ 644.651506] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.660083] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526d8431-c9dd-9e9a-0e77-92f87a9c7aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.823377] env[70020]: DEBUG nova.network.neutron [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Updated VIF entry in instance network info cache for port 063f2ba9-4e54-4d7e-9771-1defdefc3c00. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 644.823610] env[70020]: DEBUG nova.network.neutron [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Updating instance_info_cache with network_info: [{"id": "063f2ba9-4e54-4d7e-9771-1defdefc3c00", "address": "fa:16:3e:28:ab:4e", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap063f2ba9-4e", "ovs_interfaceid": "063f2ba9-4e54-4d7e-9771-1defdefc3c00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.880442] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617765, 'name': Rename_Task, 'duration_secs': 0.177486} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.880442] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 644.880442] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-509caefd-ef66-4f6d-bb55-9ca2191befeb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.885673] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 644.885673] env[70020]: value = "task-3617767" [ 644.885673] env[70020]: _type = "Task" [ 644.885673] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.895871] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617767, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.951502] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.955705] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.014090] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.014680] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 645.017654] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.412s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.019350] env[70020]: INFO nova.compute.claims [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.120443] env[70020]: DEBUG nova.network.neutron [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [{"id": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "address": "fa:16:3e:99:62:fe", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbd6812-93", "ovs_interfaceid": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.171798] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526d8431-c9dd-9e9a-0e77-92f87a9c7aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.008742} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.171798] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.171927] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.172234] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.173029] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.173029] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 645.173153] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0efb2989-7308-4e26-82be-3892057af103 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.186972] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.186972] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 645.186972] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2548d46b-64a1-4cce-8945-00145bb7b61a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.196162] env[70020]: DEBUG nova.compute.manager [req-d9e91423-a203-46eb-ba35-6fe3a50c8a59 req-351b2111-5096-420f-8eb7-16b6ada129e4 service nova] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Received event network-vif-deleted-505b9f3d-c597-4acb-8477-fd64b8ea5de1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.200240] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 645.200240] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52500ee1-9199-80b6-3ab5-57525f40f3f5" [ 645.200240] env[70020]: _type = "Task" [ 645.200240] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.209051] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52500ee1-9199-80b6-3ab5-57525f40f3f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.327960] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Releasing lock "refresh_cache-bc57657e-99e8-46b8-9731-ddd4864a3114" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.327960] env[70020]: DEBUG nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Received event network-vif-plugged-1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.327960] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Acquiring lock "d0756709-f17b-441e-b537-df937cfbde84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.327960] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Lock "d0756709-f17b-441e-b537-df937cfbde84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.327960] env[70020]: DEBUG oslo_concurrency.lockutils [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] Lock "d0756709-f17b-441e-b537-df937cfbde84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.328284] env[70020]: DEBUG nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] No waiting events found dispatching network-vif-plugged-1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 645.328284] env[70020]: WARNING nova.compute.manager [req-73556765-c4c5-41a3-b656-d780b93f6335 req-703baad5-2b70-43f6-9c57-86472c705faf service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Received unexpected event network-vif-plugged-1e8309fb-0a16-407f-9be0-abcac22185ff for instance with vm_state building and task_state spawning. [ 645.397021] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617767, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.447084] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617766, 'name': ReconfigVM_Task, 'duration_secs': 0.622715} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.447371] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Reconfigured VM instance instance-00000007 to attach disk [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47/b0b825d4-534d-4d54-a0c4-b9e507726c47.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.448143] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb965875-1883-4506-803e-3c9e7721206e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.457086] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 645.457086] env[70020]: value = "task-3617768" [ 645.457086] env[70020]: _type = "Task" [ 645.457086] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.467493] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617768, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.528849] env[70020]: DEBUG nova.compute.utils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 645.532985] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.532985] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.541313] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 645.541597] env[70020]: DEBUG nova.network.neutron [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.623518] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.623859] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Instance network_info: |[{"id": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "address": "fa:16:3e:99:62:fe", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbd6812-93", "ovs_interfaceid": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 645.624385] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:62:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cbd6812-9369-466e-a269-def6f4b8ed8f', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.634154] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Creating folder: Project (7412ed0b196c4d44b03bc93b0aae2954). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 645.634478] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54288853-9172-4f05-9d18-a406ac2740e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.649185] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Created folder: Project (7412ed0b196c4d44b03bc93b0aae2954) in parent group-v721521. [ 645.649410] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Creating folder: Instances. Parent ref: group-v721569. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 645.649949] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbf75466-e3e2-4c9b-a00e-c4f579f23afd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.658514] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Created folder: Instances in parent group-v721569. [ 645.658821] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 645.658929] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 645.659144] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2979353c-ae04-439e-8aaa-82aa938c1462 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.681045] env[70020]: DEBUG nova.policy [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ebabdad8aa843f28165fcd167382c60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfa7d3b1f5a14c60b19cde5030c2f0a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 645.685836] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.685836] env[70020]: value = "task-3617771" [ 645.685836] env[70020]: _type = "Task" [ 645.685836] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.697628] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617771, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.703033] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.703033] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.718539] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52500ee1-9199-80b6-3ab5-57525f40f3f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009825} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.719456] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dbba91e-04b7-44da-ad53-ce9ac17ea9d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.725081] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 645.725081] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ccb6b9-3b9f-3b2f-27b9-d9bb08edcfa8" [ 645.725081] env[70020]: _type = "Task" [ 645.725081] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.735186] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ccb6b9-3b9f-3b2f-27b9-d9bb08edcfa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.903211] env[70020]: DEBUG oslo_vmware.api [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617767, 'name': PowerOnVM_Task, 'duration_secs': 0.532394} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.903211] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.903211] env[70020]: INFO nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Took 12.30 seconds to spawn the instance on the hypervisor. [ 645.903211] env[70020]: DEBUG nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 645.903211] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d42105-d4d9-4252-a1fe-7fcd44d76ccc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.972677] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617768, 'name': Rename_Task, 'duration_secs': 0.138768} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.973233] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.973656] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b94803e-59cd-4d22-9d61-92d73218b306 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.987425] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Waiting for the task: (returnval){ [ 645.987425] env[70020]: value = "task-3617772" [ 645.987425] env[70020]: _type = "Task" [ 645.987425] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.006532] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617772, 'name': PowerOnVM_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.043633] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 646.057025] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 646.205692] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617771, 'name': CreateVM_Task, 'duration_secs': 0.366127} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.206024] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 646.207580] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.207648] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.207937] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 646.208881] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5017fe1-c26d-41b4-90f8-dcedade46744 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.212207] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 646.222935] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 646.222935] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5269a877-3fcc-6cca-a939-5f1dfde803f0" [ 646.222935] env[70020]: _type = "Task" [ 646.222935] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.233454] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5269a877-3fcc-6cca-a939-5f1dfde803f0, 'name': SearchDatastore_Task, 'duration_secs': 0.014558} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.235100] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.235100] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 646.235433] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.240913] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ccb6b9-3b9f-3b2f-27b9-d9bb08edcfa8, 'name': SearchDatastore_Task, 'duration_secs': 0.010697} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.241186] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.241597] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d0756709-f17b-441e-b537-df937cfbde84/d0756709-f17b-441e-b537-df937cfbde84.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 646.241840] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.242105] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 646.242191] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1dde5eae-d8fb-44f1-b818-a721ca381304 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.247101] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cce45dd-a86e-41cb-899e-3e0e05572453 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.255889] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 646.255889] env[70020]: value = "task-3617773" [ 646.255889] env[70020]: _type = "Task" [ 646.255889] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.262245] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 646.262431] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 646.267678] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b65a270a-1608-49a1-89e6-98abd2628b57 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.279528] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617773, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.283851] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 646.283851] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5249dda1-f2dd-5cca-e55f-a964296e8790" [ 646.283851] env[70020]: _type = "Task" [ 646.283851] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.300849] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5249dda1-f2dd-5cca-e55f-a964296e8790, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.432800] env[70020]: INFO nova.compute.manager [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Took 24.67 seconds to build instance. [ 646.467313] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.467550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.497598] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617772, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.506341] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.506564] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.545457] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15281b1a-7a77-4c72-9c60-2378122d2eec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.559946] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b30b080-d310-473d-8798-1767d999b907 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.607872] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae2e006-e373-4567-a66b-5fb840e9ce78 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.616809] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43700b6a-60b1-457c-b54f-581215f04382 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.632316] env[70020]: DEBUG nova.compute.provider_tree [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.635233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.754778] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.771045] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 
tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617773, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.805776] env[70020]: DEBUG nova.network.neutron [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Successfully updated port: b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 646.807384] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5249dda1-f2dd-5cca-e55f-a964296e8790, 'name': SearchDatastore_Task, 'duration_secs': 0.020586} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.808465] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5afddfec-38b4-4bb0-acfb-dd88a1b4a94b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.818825] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 646.818825] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a13867-e20d-139b-ba4a-dc83e698393c" [ 646.818825] env[70020]: _type = "Task" [ 646.818825] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.826813] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a13867-e20d-139b-ba4a-dc83e698393c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.935356] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c4176dc-f518-4259-a1fc-d3599d3288dd tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.188s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.000863] env[70020]: DEBUG oslo_vmware.api [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Task: {'id': task-3617772, 'name': PowerOnVM_Task, 'duration_secs': 0.51521} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.001149] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 647.001337] env[70020]: DEBUG nova.compute.manager [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.002243] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc07633-cba5-473b-b551-9a54958e038e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.067135] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 647.110194] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.110446] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.110614] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.111732] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.111908] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 
tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 647.112088] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 647.112282] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.112446] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.112611] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.112774] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.112946] env[70020]: DEBUG nova.virt.hardware [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.114612] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ca6286-8c3d-4864-a62f-e50ec4b350ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.123208] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ec49a3-515a-4728-b449-5b7be5246254 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.127928] env[70020]: DEBUG nova.network.neutron [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Successfully created port: 75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.147461] env[70020]: DEBUG nova.scheduler.client.report [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Inventory has not changed for provider 
ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 647.246213] env[70020]: DEBUG nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Received event network-changed-1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 647.246213] env[70020]: DEBUG nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Refreshing instance network info cache due to event network-changed-1e8309fb-0a16-407f-9be0-abcac22185ff. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 647.246213] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Acquiring lock "refresh_cache-d0756709-f17b-441e-b537-df937cfbde84" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.246213] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Acquired lock "refresh_cache-d0756709-f17b-441e-b537-df937cfbde84" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.246604] env[70020]: DEBUG nova.network.neutron [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Refreshing network info cache for port 1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.269433] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617773, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649651} completed successfully. 
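The inventory reported above for provider ee72c483-d9d9-4e62-8f73-e9f24668500d is what the scheduler packs against. A short check of the usual placement arithmetic, capacity = (total - reserved) * allocation_ratio, applied to those exact figures (the formula is the standard placement one, sketched here rather than lifted from Nova):

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400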
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.269433] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d0756709-f17b-441e-b537-df937cfbde84/d0756709-f17b-441e-b537-df937cfbde84.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 647.269433] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 647.269433] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31c6b5ea-3848-4fee-b480-946dd0c672e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.278951] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 647.278951] env[70020]: value = "task-3617774" [ 647.278951] env[70020]: _type = "Task" [ 647.278951] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.296886] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617774, 'name': ExtendVirtualDisk_Task} progress is 0%. 
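"Extending root virtual disk to 1048576" lines up with the m1.nano flavor's root_gb=1 seen earlier: the driver expresses the requested size in KiB, and 1 GiB is 1 048 576 KiB. A one-line sanity check, assuming the unit conversion is all that is happening at this step:

root_gb = 1                            # m1.nano flavor from the log
requested_kb = root_gb * 1024 * 1024   # GiB -> KiB
assert requested_kb == 1048576         # matches the figure in the log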
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.310541] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.310541] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.310541] env[70020]: DEBUG nova.network.neutron [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.335502] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a13867-e20d-139b-ba4a-dc83e698393c, 'name': SearchDatastore_Task, 'duration_secs': 0.062508} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.338674] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.338674] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 08ce6bc8-30fe-4c63-80e1-26c84ae75702/08ce6bc8-30fe-4c63-80e1-26c84ae75702.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 647.338674] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c5a6aeb-e6e2-4419-a896-018e925e8eb1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.345643] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 647.345643] env[70020]: value = "task-3617775" [ 647.345643] env[70020]: _type = "Task" [ 647.345643] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.354045] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617775, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.390886] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.391748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.439777] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 647.526348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.653686] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.636s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.654202] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Start building networks asynchronously for instance. 
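"Start building networks asynchronously for instance" means the Neutron port allocation is kicked off in the background while the rest of the build continues. Nova does this with eventlet greenthreads (the log's oslo.service eventlet backend); the sketch below uses concurrent.futures only to keep the example self-contained, and allocate_network is a placeholder, not the real call.

from concurrent.futures import ThreadPoolExecutor

def allocate_network(instance_uuid):
    # stand-in for the Neutron allocate_for_instance() work
    return {"instance": instance_uuid, "ports": []}

def build_instance(instance_uuid):
    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_network, instance_uuid)
        # ... block device mappings and spawn preparation proceed here ...
        return nw_future.result()   # join before the VM actually needs its VIFs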
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 647.663268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.546s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.663268] env[70020]: DEBUG nova.objects.instance [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lazy-loading 'resources' on Instance uuid 0cc49db6-1574-4e51-8692-b79ee14bc25d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 647.792891] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617774, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069995} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.797017] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.797017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0321a7-57f7-47ba-a5c2-a079b16b7c98 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.837170] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] d0756709-f17b-441e-b537-df937cfbde84/d0756709-f17b-441e-b537-df937cfbde84.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 647.842272] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaa5478d-1473-481c-90bc-983f34b5581f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.876330] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617775, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.878414] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 647.878414] env[70020]: value = "task-3617776" [ 647.878414] env[70020]: _type = "Task" [ 647.878414] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.889326] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617776, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.984686] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.990199] env[70020]: DEBUG nova.network.neutron [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.080266] env[70020]: DEBUG nova.compute.manager [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Received event network-vif-plugged-b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.080537] env[70020]: DEBUG oslo_concurrency.lockutils [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] Acquiring lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.080805] env[70020]: DEBUG oslo_concurrency.lockutils [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] Lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.081177] env[70020]: DEBUG oslo_concurrency.lockutils [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] Lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.081450] env[70020]: DEBUG nova.compute.manager [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] No waiting events found dispatching network-vif-plugged-b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 648.081586] env[70020]: WARNING nova.compute.manager [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Received unexpected event network-vif-plugged-b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 for instance with vm_state building and task_state spawning. 
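The WARNING just above is the expected outcome when Neutron reports network-vif-plugged before anything has registered to wait for it: with the instance still building, there is no waiter to hand the event to. A hedged sketch of that dispatch decision follows; the module-level registry is illustrative, whereas Nova keeps this state in InstanceEvents under the "-events" lock seen in the log.

import logging

LOG = logging.getLogger(__name__)
_waiting_events = {}   # {(instance_uuid, event_tag): callback}

def pop_instance_event(instance_uuid, event_tag):
    callback = _waiting_events.pop((instance_uuid, event_tag), None)
    if callback is None:
        # nobody was waiting, so the event is logged and dropped
        LOG.warning("Received unexpected event %s for instance %s",
                    event_tag, instance_uuid)
        return
    callback(event_tag)   # wake the waiter that registered for this VIF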
[ 648.081746] env[70020]: DEBUG nova.compute.manager [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Received event network-changed-b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.081893] env[70020]: DEBUG nova.compute.manager [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Refreshing instance network info cache due to event network-changed-b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.082084] env[70020]: DEBUG oslo_concurrency.lockutils [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] Acquiring lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.166315] env[70020]: DEBUG nova.compute.utils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 648.175441] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.175441] env[70020]: DEBUG nova.network.neutron [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.261255] env[70020]: DEBUG nova.network.neutron [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Updated VIF entry in instance network info cache for port 1e8309fb-0a16-407f-9be0-abcac22185ff. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 648.261666] env[70020]: DEBUG nova.network.neutron [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Updating instance_info_cache with network_info: [{"id": "1e8309fb-0a16-407f-9be0-abcac22185ff", "address": "fa:16:3e:50:0d:87", "network": {"id": "b9cd5a16-4e28-4b20-983f-20891a232625", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1417869427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8444a0cca1194f8e9e190e02a82e9556", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e8309fb-0a", "ovs_interfaceid": "1e8309fb-0a16-407f-9be0-abcac22185ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.284733] env[70020]: DEBUG nova.policy [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ba0368399ad4bbdb8b2cc95f6c8f187', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '852583dc12774b19bffbb2b0791e8336', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.378138] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617775, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.7853} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.378459] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 08ce6bc8-30fe-4c63-80e1-26c84ae75702/08ce6bc8-30fe-4c63-80e1-26c84ae75702.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 648.378682] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 648.378993] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4979d22-ead5-476e-8ad6-4680db862f8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.393800] env[70020]: DEBUG nova.network.neutron [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance_info_cache with network_info: [{"id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "address": "fa:16:3e:a9:91:ab", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d9f41a-97", "ovs_interfaceid": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.400441] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617776, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.400441] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 648.400441] env[70020]: value = "task-3617777" [ 648.400441] env[70020]: _type = "Task" [ 648.400441] env[70020]: } to complete. 
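The instance_info_cache entry above for port b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 is plain nested lists and dicts, so pulling the MAC and fixed IPs back out is a short traversal. The vif dict below is a trimmed copy of that cache entry, kept only to make the example runnable on its own.

vif = {
    "id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2",
    "address": "fa:16:3e:a9:91:ab",
    "network": {
        "subnets": [
            {"cidr": "192.168.233.0/24",
             "ips": [{"address": "192.168.233.238", "type": "fixed"}]},
        ],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["address"], fixed_ips)   # fa:16:3e:a9:91:ab ['192.168.233.238']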
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.411261] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.636997] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b3c66a-13a8-4f26-a317-9aced16fb211 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.648739] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af22921e-d4c3-450b-a617-b8ba9c7fcf92 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.689613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.689613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.689613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.689613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.690262] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.691273] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 648.694390] env[70020]: INFO nova.compute.manager [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Terminating instance [ 648.696766] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa87b2b-193e-4d7d-a350-a49e2f76bcfc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.711527] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bb373e-e925-4188-9c7b-78bc3da3f7cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.728349] env[70020]: DEBUG nova.compute.provider_tree [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.769153] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Releasing lock "refresh_cache-d0756709-f17b-441e-b537-df937cfbde84" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.769316] env[70020]: DEBUG nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Received event network-vif-plugged-7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.770041] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.771032] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.771032] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.771032] env[70020]: DEBUG nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] No waiting events found dispatching network-vif-plugged-7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 648.771152] 
env[70020]: WARNING nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Received unexpected event network-vif-plugged-7cbd6812-9369-466e-a269-def6f4b8ed8f for instance with vm_state building and task_state spawning. [ 648.772256] env[70020]: DEBUG nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Received event network-changed-7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.772442] env[70020]: DEBUG nova.compute.manager [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Refreshing instance network info cache due to event network-changed-7cbd6812-9369-466e-a269-def6f4b8ed8f. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.773052] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Acquiring lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.773052] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Acquired lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.773052] env[70020]: DEBUG nova.network.neutron [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Refreshing network info cache for port 7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.894491] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617776, 'name': ReconfigVM_Task, 'duration_secs': 0.590755} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.895380] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Reconfigured VM instance instance-00000010 to attach disk [datastore2] d0756709-f17b-441e-b537-df937cfbde84/d0756709-f17b-441e-b537-df937cfbde84.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.899641] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b03fd6bd-e996-4bcb-9ee3-eff5d5d42f5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.904655] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.904655] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Instance network_info: |[{"id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "address": "fa:16:3e:a9:91:ab", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d9f41a-97", "ovs_interfaceid": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 648.909147] env[70020]: DEBUG oslo_concurrency.lockutils [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] Acquired lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.909147] env[70020]: DEBUG nova.network.neutron [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Refreshing network info cache for port b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.909147] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d 
tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:91:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 648.913434] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Creating folder: Project (051ddf351c534f65be94aef74fb2ff03). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.920015] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01958837-93e7-49ef-a1d9-979b3e3bb5bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.925992] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 648.925992] env[70020]: value = "task-3617778" [ 648.925992] env[70020]: _type = "Task" [ 648.925992] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.931879] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084398} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.934500] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 648.937237] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371b8d01-9d94-4c23-bb33-321cb9395571 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.940847] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Created folder: Project (051ddf351c534f65be94aef74fb2ff03) in parent group-v721521. [ 648.941039] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Creating folder: Instances. Parent ref: group-v721572. 
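The folder steps above create a per-project folder under the OpenStack root and then an Instances folder inside it, with vCenter returning a group-v... managed object reference at each level. A minimal sketch of that two-level layout; create_folder is a hypothetical callback standing in for the Folder.CreateFolder invocation, and the root reference is the one shown in the log.

def ensure_instance_folder(create_folder, project_id, root_ref="group-v721521"):
    # first level: "Project (<project_id>)" under the OpenStack root folder
    project_ref = create_folder(root_ref, "Project (%s)" % project_id)
    # second level: "Instances" inside the project folder
    return create_folder(project_ref, "Instances")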
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.941809] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3babae7-f81d-4540-8256-5f72bc0d267d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.947836] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617778, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.968371] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 08ce6bc8-30fe-4c63-80e1-26c84ae75702/08ce6bc8-30fe-4c63-80e1-26c84ae75702.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 648.970237] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05459aad-403c-46d8-83c2-37be773a2550 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.986829] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Created folder: Instances in parent group-v721572. [ 648.987123] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 648.987449] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 648.988072] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-118e0abe-fd37-4674-bbde-6231fd6eb26d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.006230] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 649.006230] env[70020]: value = "task-3617781" [ 649.006230] env[70020]: _type = "Task" [ 649.006230] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.011218] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.011218] env[70020]: value = "task-3617782" [ 649.011218] env[70020]: _type = "Task" [ 649.011218] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.018490] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617781, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.023807] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617782, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.203681] env[70020]: DEBUG nova.compute.manager [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 649.204381] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 649.205049] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ac260e-2e15-4592-8d4f-252eff5bc54e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.220032] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 649.220385] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66a57d7c-7379-46d7-8837-aa98625308e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.231759] env[70020]: DEBUG nova.scheduler.client.report [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.289769] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 649.289769] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 
81d5a1b4-1398-4fca-b500-aa2a3dc41494] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 649.289769] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleting the datastore file [datastore2] 81d5a1b4-1398-4fca-b500-aa2a3dc41494 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 649.289769] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15eced2b-12fe-4b66-80c0-7beba22246d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.300756] env[70020]: DEBUG oslo_vmware.api [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 649.300756] env[70020]: value = "task-3617784" [ 649.300756] env[70020]: _type = "Task" [ 649.300756] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.313874] env[70020]: DEBUG oslo_vmware.api [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.404529] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "b0b825d4-534d-4d54-a0c4-b9e507726c47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.404529] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "b0b825d4-534d-4d54-a0c4-b9e507726c47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.405133] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "b0b825d4-534d-4d54-a0c4-b9e507726c47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.405392] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "b0b825d4-534d-4d54-a0c4-b9e507726c47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.405604] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 
tempest-ServersAdmin275Test-2116430007-project-member] Lock "b0b825d4-534d-4d54-a0c4-b9e507726c47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.409243] env[70020]: INFO nova.compute.manager [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Terminating instance [ 649.451538] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617778, 'name': Rename_Task, 'duration_secs': 0.254749} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.452532] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.452532] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5c5ef22-b7e3-4c14-8b8f-85b0aae00343 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.461817] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 649.461817] env[70020]: value = "task-3617785" [ 649.461817] env[70020]: _type = "Task" [ 649.461817] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.470751] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617785, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.521266] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617781, 'name': ReconfigVM_Task, 'duration_secs': 0.421642} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.522033] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 08ce6bc8-30fe-4c63-80e1-26c84ae75702/08ce6bc8-30fe-4c63-80e1-26c84ae75702.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.523304] env[70020]: DEBUG nova.network.neutron [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Successfully created port: c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.525267] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10ac5413-716c-4ad2-a876-1476e5862ba8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.530478] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617782, 'name': CreateVM_Task, 'duration_secs': 0.480632} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.533584] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 649.534306] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.534504] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.534884] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 649.535486] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5d1e2e-44ca-49c2-bdd8-af76002e8bbe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.538524] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 649.538524] env[70020]: value = "task-3617786" [ 649.538524] env[70020]: _type = "Task" [ 649.538524] 
env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.544377] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 649.544377] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5292876c-44c4-a75d-b485-32218e1f2544" [ 649.544377] env[70020]: _type = "Task" [ 649.544377] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.550950] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617786, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.558257] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5292876c-44c4-a75d-b485-32218e1f2544, 'name': SearchDatastore_Task, 'duration_secs': 0.012788} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.558968] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.559433] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 649.559596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.559794] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.560154] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.560521] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2329b1b1-c8e2-4375-833e-315e866bdc66 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.570287] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.572700] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 649.572700] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f2e322-2ba2-438d-94ce-4d69e602cbc0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.576924] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 649.576924] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ffa5e6-48fd-be60-c483-4ce8df2d8630" [ 649.576924] env[70020]: _type = "Task" [ 649.576924] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.585280] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ffa5e6-48fd-be60-c483-4ce8df2d8630, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.709410] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 649.746171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.083s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.751746] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.751954] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.752134] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.752313] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.752452] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.752596] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.752801] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.752952] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 649.753152] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.753323] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.753493] env[70020]: DEBUG nova.virt.hardware [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.754256] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.639s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.754455] env[70020]: DEBUG nova.objects.instance [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 649.758221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59807b89-c378-4bc5-bb47-be223ca8638a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.769718] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7701d8ea-9e5d-48f5-8c05-1a7306044b0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.787433] env[70020]: INFO nova.scheduler.client.report [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Deleted allocations for instance 0cc49db6-1574-4e51-8692-b79ee14bc25d [ 649.816193] env[70020]: DEBUG oslo_vmware.api [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328904} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.816456] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 649.816637] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 649.816934] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 649.817174] env[70020]: INFO nova.compute.manager [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Took 0.61 seconds to destroy the instance on the hypervisor. [ 649.817419] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 649.817596] env[70020]: DEBUG nova.compute.manager [-] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 649.817688] env[70020]: DEBUG nova.network.neutron [-] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 649.914672] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "refresh_cache-b0b825d4-534d-4d54-a0c4-b9e507726c47" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.914872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquired lock "refresh_cache-b0b825d4-534d-4d54-a0c4-b9e507726c47" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.915067] env[70020]: DEBUG nova.network.neutron [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.977380] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 
tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617785, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.030546] env[70020]: DEBUG nova.network.neutron [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updated VIF entry in instance network info cache for port 7cbd6812-9369-466e-a269-def6f4b8ed8f. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 650.031218] env[70020]: DEBUG nova.network.neutron [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [{"id": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "address": "fa:16:3e:99:62:fe", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbd6812-93", "ovs_interfaceid": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.052153] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617786, 'name': Rename_Task, 'duration_secs': 0.164328} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.052497] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 650.052918] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc17df11-ffe9-4f37-818a-1d5742a6e46a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.058854] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 650.058854] env[70020]: value = "task-3617787" [ 650.058854] env[70020]: _type = "Task" [ 650.058854] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.072335] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.087825] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ffa5e6-48fd-be60-c483-4ce8df2d8630, 'name': SearchDatastore_Task, 'duration_secs': 0.010083} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.090948] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ce35719-6e1c-448d-9a8b-1d04cac978d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.097621] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 650.097621] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529468bf-a962-c79b-02e0-7f1e44ab9d06" [ 650.097621] env[70020]: _type = "Task" [ 650.097621] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.106411] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529468bf-a962-c79b-02e0-7f1e44ab9d06, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.250625] env[70020]: DEBUG nova.network.neutron [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Successfully updated port: 75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 650.303669] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9d9e1da6-68b7-4cf8-80ac-f6587328453a tempest-ServerDiagnosticsV248Test-1057623464 tempest-ServerDiagnosticsV248Test-1057623464-project-member] Lock "0cc49db6-1574-4e51-8692-b79ee14bc25d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.652s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.304866] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.308337] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.437445] env[70020]: DEBUG nova.network.neutron [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updated VIF entry in instance network info cache for port b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 650.437790] env[70020]: DEBUG nova.network.neutron [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance_info_cache with network_info: [{"id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "address": "fa:16:3e:a9:91:ab", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d9f41a-97", "ovs_interfaceid": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.463346] env[70020]: DEBUG nova.network.neutron [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.476516] env[70020]: DEBUG oslo_vmware.api [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617785, 'name': PowerOnVM_Task, 'duration_secs': 0.850159} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.476868] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 650.477221] env[70020]: INFO nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Took 13.20 seconds to spawn the instance on the hypervisor. 
[ 650.477425] env[70020]: DEBUG nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 650.478444] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20679763-a7e3-42f8-8f93-087ed21732e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.534685] env[70020]: DEBUG oslo_concurrency.lockutils [req-4320c1b1-5e59-4c00-bd06-977b430fd70e req-8a65c234-edf6-409e-90cf-3c80564a68a1 service nova] Releasing lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.580020] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617787, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.608678] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529468bf-a962-c79b-02e0-7f1e44ab9d06, 'name': SearchDatastore_Task, 'duration_secs': 0.011123} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.610026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.610026] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606/4b5750d4-98ec-4c70-b214-fad97060b606.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.610026] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-258af95f-2209-446e-a69e-f0429385637e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.620864] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 650.620864] env[70020]: value = "task-3617792" [ 650.620864] env[70020]: _type = "Task" [ 650.620864] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.630137] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.755107] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.755107] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.755107] env[70020]: DEBUG nova.network.neutron [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.770758] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5923d41e-7ef7-441e-98ea-69552995ecb2 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.772232] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.966s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.773806] env[70020]: INFO nova.compute.claims [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.859896] env[70020]: DEBUG nova.network.neutron [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.941540] env[70020]: DEBUG oslo_concurrency.lockutils [req-1d2d9212-953d-4db2-b56a-0e2f5c443c7d req-aa362a03-7ffd-4809-b3f7-587b1f371051 service nova] Releasing lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.004131] env[70020]: INFO 
nova.compute.manager [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Took 28.09 seconds to build instance. [ 651.085766] env[70020]: DEBUG oslo_vmware.api [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3617787, 'name': PowerOnVM_Task, 'duration_secs': 0.686462} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.086172] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.086386] env[70020]: INFO nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Took 11.23 seconds to spawn the instance on the hypervisor. [ 651.086573] env[70020]: DEBUG nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 651.087535] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e458dab-c80b-47fc-88f9-cbeb3cb0a104 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.132173] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617792, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.204248] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.204407] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.316518] env[70020]: DEBUG nova.network.neutron [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.348397] env[70020]: DEBUG nova.network.neutron [-] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.363233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Releasing lock "refresh_cache-b0b825d4-534d-4d54-a0c4-b9e507726c47" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.363829] env[70020]: DEBUG nova.compute.manager [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 651.364016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.368134] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4bb0c9-ee46-4454-8e45-b9e24524d093 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.375090] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.375450] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef7d5c70-24de-44f7-a80d-b330810474a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.383630] env[70020]: DEBUG oslo_vmware.api [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 651.383630] env[70020]: value = "task-3617793" [ 651.383630] env[70020]: _type = "Task" [ 651.383630] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.394984] env[70020]: DEBUG oslo_vmware.api [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.412113] env[70020]: DEBUG nova.compute.manager [req-652d35ff-c585-4458-8d63-e479072c9245 req-3fb0779f-0ff5-45df-abc9-0ab695605ab3 service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Received event network-vif-plugged-75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 651.413391] env[70020]: DEBUG oslo_concurrency.lockutils [req-652d35ff-c585-4458-8d63-e479072c9245 req-3fb0779f-0ff5-45df-abc9-0ab695605ab3 service nova] Acquiring lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.413701] env[70020]: DEBUG oslo_concurrency.lockutils [req-652d35ff-c585-4458-8d63-e479072c9245 req-3fb0779f-0ff5-45df-abc9-0ab695605ab3 service nova] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.413827] env[70020]: DEBUG oslo_concurrency.lockutils [req-652d35ff-c585-4458-8d63-e479072c9245 req-3fb0779f-0ff5-45df-abc9-0ab695605ab3 service nova] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.413904] env[70020]: DEBUG nova.compute.manager [req-652d35ff-c585-4458-8d63-e479072c9245 req-3fb0779f-0ff5-45df-abc9-0ab695605ab3 service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] No waiting events found dispatching network-vif-plugged-75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 651.414081] env[70020]: WARNING nova.compute.manager [req-652d35ff-c585-4458-8d63-e479072c9245 req-3fb0779f-0ff5-45df-abc9-0ab695605ab3 service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Received unexpected event network-vif-plugged-75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 for instance with vm_state building and task_state spawning. [ 651.509704] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c97c03ca-1fd6-4eb1-b865-cbf44d1f2ba4 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "d0756709-f17b-441e-b537-df937cfbde84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.139s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.615252] env[70020]: INFO nova.compute.manager [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Took 28.37 seconds to build instance. [ 651.632400] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.786006} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.632400] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606/4b5750d4-98ec-4c70-b214-fad97060b606.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.632400] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 651.632595] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e6a32b5-7a9e-4139-854c-caa145527fc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.640259] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 651.640259] env[70020]: value = "task-3617794" [ 651.640259] env[70020]: _type = "Task" [ 651.640259] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.649828] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617794, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.689314] env[70020]: DEBUG nova.network.neutron [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Updating instance_info_cache with network_info: [{"id": "75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9", "address": "fa:16:3e:ce:42:78", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bc1ac5-ec", "ovs_interfaceid": "75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.851275] env[70020]: INFO nova.compute.manager [-] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Took 2.03 seconds to deallocate network for instance. [ 651.895195] env[70020]: DEBUG oslo_vmware.api [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617793, 'name': PowerOffVM_Task, 'duration_secs': 0.278754} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.898034] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.898264] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.898765] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-443f59c0-9757-4476-b676-8eaaaa84f908 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.926872] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.926872] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.926872] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Deleting the datastore file [datastore2] b0b825d4-534d-4d54-a0c4-b9e507726c47 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.926872] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54c99fad-b485-4d6c-97e1-042857caf38c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.933405] env[70020]: DEBUG oslo_vmware.api [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for the task: (returnval){ [ 651.933405] env[70020]: value = "task-3617796" [ 651.933405] env[70020]: _type = "Task" [ 651.933405] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.947233] env[70020]: DEBUG oslo_vmware.api [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.013489] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 652.117989] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca971112-1fdc-43e1-a08f-4a2ce10df748 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.884s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.155616] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094877} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.155883] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 652.156862] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241c3a65-1de8-46ad-adff-57483b8a4a00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.184707] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606/4b5750d4-98ec-4c70-b214-fad97060b606.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 652.185348] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eca6b762-6673-4890-a515-165dbabe10f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.202162] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.203081] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Instance network_info: |[{"id": "75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9", "address": "fa:16:3e:ce:42:78", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bc1ac5-ec", "ovs_interfaceid": "75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 652.204196] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:42:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.214592] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating folder: Project (cfa7d3b1f5a14c60b19cde5030c2f0a2). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.215315] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ecad010-d39b-49d8-8bf6-975c027c9546 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.221452] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 652.221452] env[70020]: value = "task-3617797" [ 652.221452] env[70020]: _type = "Task" [ 652.221452] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.231063] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created folder: Project (cfa7d3b1f5a14c60b19cde5030c2f0a2) in parent group-v721521. [ 652.231063] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating folder: Instances. Parent ref: group-v721578. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.231352] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac2770a2-d20f-4d3a-8fa5-e5901e0f7b1a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.233324] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617797, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.244675] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created folder: Instances in parent group-v721578. [ 652.244973] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.247979] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.248777] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-328be149-b270-4d16-aaf3-d3648a074396 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.271437] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.271437] env[70020]: value = "task-3617800" [ 652.271437] env[70020]: _type = "Task" [ 652.271437] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.280452] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617800, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.310852] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d3d42b-3512-4d9b-8429-801aed5f864f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.318285] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c271f839-e75b-4c91-a2b4-dd6106db954d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.349863] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501eaa8e-3e72-46c1-bd16-3945e1dbdfb7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.357793] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4529ba-abf0-4185-b1bd-8f986a104f59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.362764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.374190] env[70020]: DEBUG nova.compute.provider_tree [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.443129] env[70020]: DEBUG oslo_vmware.api [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Task: {'id': task-3617796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427776} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.443556] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.443773] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.444152] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.444152] env[70020]: INFO nova.compute.manager [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Took 1.08 seconds to destroy the instance on the hypervisor. [ 652.444363] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.444813] env[70020]: DEBUG nova.compute.manager [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 652.444813] env[70020]: DEBUG nova.network.neutron [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.466511] env[70020]: DEBUG nova.network.neutron [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.560833] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.621424] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 652.648859] env[70020]: DEBUG nova.network.neutron [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Successfully updated port: c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 652.737176] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.788257] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617800, 'name': CreateVM_Task, 'duration_secs': 0.40806} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.788558] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 652.790806] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.790905] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.791196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 652.791579] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa7d4bd5-1253-495a-aa8d-c745bcb1892e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.800575] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 652.800575] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d564e5-0851-3abe-1261-c0b760b4c538" [ 652.800575] env[70020]: _type = "Task" [ 652.800575] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.813727] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d564e5-0851-3abe-1261-c0b760b4c538, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.877822] env[70020]: DEBUG nova.scheduler.client.report [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.974147] env[70020]: DEBUG nova.network.neutron [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.005359] env[70020]: DEBUG nova.compute.manager [req-ba9b3b1f-ccf2-425f-8ee4-523b9f0de31f req-d8ec426a-cdc8-419b-8a36-fdb08fb788f7 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Received event network-vif-plugged-c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.005359] env[70020]: DEBUG oslo_concurrency.lockutils [req-ba9b3b1f-ccf2-425f-8ee4-523b9f0de31f req-d8ec426a-cdc8-419b-8a36-fdb08fb788f7 service nova] Acquiring lock "3501a6fc-f090-4098-8f63-57a97bd61f1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.005359] env[70020]: DEBUG oslo_concurrency.lockutils [req-ba9b3b1f-ccf2-425f-8ee4-523b9f0de31f req-d8ec426a-cdc8-419b-8a36-fdb08fb788f7 service nova] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.005615] env[70020]: DEBUG oslo_concurrency.lockutils [req-ba9b3b1f-ccf2-425f-8ee4-523b9f0de31f req-d8ec426a-cdc8-419b-8a36-fdb08fb788f7 service nova] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.005697] env[70020]: DEBUG nova.compute.manager [req-ba9b3b1f-ccf2-425f-8ee4-523b9f0de31f req-d8ec426a-cdc8-419b-8a36-fdb08fb788f7 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] No waiting events found dispatching network-vif-plugged-c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 653.005939] env[70020]: WARNING 
nova.compute.manager [req-ba9b3b1f-ccf2-425f-8ee4-523b9f0de31f req-d8ec426a-cdc8-419b-8a36-fdb08fb788f7 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Received unexpected event network-vif-plugged-c5dfdf74-4ed7-460a-b458-cb45cbc910c2 for instance with vm_state building and task_state spawning. [ 653.158595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.158764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquired lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.158940] env[70020]: DEBUG nova.network.neutron [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.174795] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.235650] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.235906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.241478] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617797, 'name': ReconfigVM_Task, 'duration_secs': 0.609216} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.241788] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606/4b5750d4-98ec-4c70-b214-fad97060b606.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 653.242461] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd313687-e3c3-49f7-9f96-ac7f50cf2476 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.250742] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 653.250742] env[70020]: value = "task-3617801" [ 653.250742] env[70020]: _type = "Task" [ 653.250742] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.267355] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617801, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.279543] env[70020]: DEBUG nova.compute.manager [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 653.280627] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd50150e-5b2a-4471-ab2c-33588dfc4b37 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.321663] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d564e5-0851-3abe-1261-c0b760b4c538, 'name': SearchDatastore_Task, 'duration_secs': 0.013916} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.322256] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.322924] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.322924] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.323431] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.323431] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.323615] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed708b91-4292-4e43-84af-db2debb02e67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.340923] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.340923] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.344827] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44feb15a-4fe6-4bb7-82cd-7c3924cfae81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.354469] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 653.354469] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d6d6ee-e1d2-65ad-fb20-f15c87ada871" [ 653.354469] env[70020]: _type = "Task" [ 653.354469] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.368297] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d6d6ee-e1d2-65ad-fb20-f15c87ada871, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.387854] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.388407] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.393471] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.632s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.393471] env[70020]: DEBUG nova.objects.instance [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lazy-loading 'resources' on Instance uuid 372e5569-8824-4841-b3d6-4b07423c7b3d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 653.479143] env[70020]: INFO nova.compute.manager [-] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Took 1.03 seconds to deallocate network for instance. [ 653.767384] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617801, 'name': Rename_Task, 'duration_secs': 0.157117} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.769131] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 653.772103] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79e080f6-fd9c-4444-a110-2a60a86b374e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.782107] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 653.782107] env[70020]: value = "task-3617803" [ 653.782107] env[70020]: _type = "Task" [ 653.782107] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.793530] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.797665] env[70020]: INFO nova.compute.manager [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] instance snapshotting [ 653.800384] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35903f8f-dae6-4af4-8393-8c956afdca00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.823523] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ce5d4f-f329-4e66-a728-64dc8f7c79ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.844072] env[70020]: DEBUG nova.network.neutron [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.865876] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d6d6ee-e1d2-65ad-fb20-f15c87ada871, 'name': SearchDatastore_Task, 'duration_secs': 0.011899} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.866803] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceab5a11-8db7-4f64-bee1-c777009d8d0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.873488] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 653.873488] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52188eae-8455-2614-605a-0e7bc8f0f781" [ 653.873488] env[70020]: _type = "Task" [ 653.873488] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.883934] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52188eae-8455-2614-605a-0e7bc8f0f781, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.895141] env[70020]: DEBUG nova.compute.utils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.896620] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.896821] env[70020]: DEBUG nova.network.neutron [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.985641] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.050329] env[70020]: DEBUG nova.policy [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4413645e57d6483887bd6431f71360eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90641c26c4064f219bf2e52694da4e0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 654.294769] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617803, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.326387] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1c162d-376d-484e-8c29-8bd55aa8be34 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.337016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 654.337409] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e766a6f0-4828-466f-b3dc-8f2765ffad7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.341969] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae42cea-8b74-4f98-97b4-143deac2df6d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.380887] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8f7e61-4ad2-4161-950e-a834128aace5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.386500] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 654.386500] env[70020]: value = "task-3617804" [ 654.386500] env[70020]: _type = "Task" [ 654.386500] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.395262] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52188eae-8455-2614-605a-0e7bc8f0f781, 'name': SearchDatastore_Task, 'duration_secs': 0.03} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.395262] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.395262] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456/d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.395262] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c279c3-f32b-4d8d-8311-d288c065bb7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.404764] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2064271a-d6cd-429f-be94-3d112c3f8976 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.407348] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.410680] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617804, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.423818] env[70020]: DEBUG nova.compute.provider_tree [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.426104] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 654.426104] env[70020]: value = "task-3617805" [ 654.426104] env[70020]: _type = "Task" [ 654.426104] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.508596] env[70020]: DEBUG nova.network.neutron [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updating instance_info_cache with network_info: [{"id": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "address": "fa:16:3e:40:87:06", "network": {"id": "0676912c-462c-45af-8413-43c139247139", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-574076578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "852583dc12774b19bffbb2b0791e8336", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfdf74-4e", "ovs_interfaceid": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.542237] env[70020]: DEBUG nova.compute.manager [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Received event network-changed-75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 654.542237] env[70020]: DEBUG nova.compute.manager [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Refreshing instance network info cache due to event network-changed-75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 654.545539] env[70020]: DEBUG oslo_concurrency.lockutils [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] Acquiring lock "refresh_cache-d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.546048] env[70020]: DEBUG oslo_concurrency.lockutils [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] Acquired lock "refresh_cache-d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.546048] env[70020]: DEBUG nova.network.neutron [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Refreshing network info cache for port 75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 654.802833] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617803, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.897434] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.938200] env[70020]: DEBUG nova.scheduler.client.report [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.950161] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617805, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.013375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Releasing lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.013720] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Instance network_info: |[{"id": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "address": "fa:16:3e:40:87:06", "network": {"id": "0676912c-462c-45af-8413-43c139247139", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-574076578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "852583dc12774b19bffbb2b0791e8336", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfdf74-4e", "ovs_interfaceid": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 655.014194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:87:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5dfdf74-4ed7-460a-b458-cb45cbc910c2', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.027173] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Creating folder: Project (852583dc12774b19bffbb2b0791e8336). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.027580] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1597892a-0b43-4461-a8cd-0b017ca7af20 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.040170] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Created folder: Project (852583dc12774b19bffbb2b0791e8336) in parent group-v721521. [ 655.040170] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Creating folder: Instances. Parent ref: group-v721581. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.040390] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a175a53-50f8-4d48-84e8-5c26b88c8d96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.055203] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Created folder: Instances in parent group-v721581. [ 655.056508] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.057229] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.058400] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f9878cf-8070-497e-b722-ce95051f3c56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.083303] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.083303] env[70020]: value = "task-3617808" [ 655.083303] env[70020]: _type = "Task" [ 655.083303] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.094520] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617808, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.302883] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617803, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.398977] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.425122] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.440294] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617805, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63005} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.440689] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456/d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.440960] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.441317] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd30af43-54c3-432b-9d81-f243b6016e45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.447128] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.055s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.449622] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.181s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.451760] env[70020]: INFO nova.compute.claims [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 
0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.457995] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 655.457995] env[70020]: value = "task-3617810" [ 655.457995] env[70020]: _type = "Task" [ 655.457995] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.464400] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T23:01:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2104475381',id=31,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-928366958',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.464701] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.464981] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.465846] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.465846] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.465846] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.465846] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 
tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.465846] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 655.466186] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.466186] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.466186] env[70020]: DEBUG nova.virt.hardware [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.467079] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e742fa1-46ce-469b-be4d-8df4799e9d8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.476465] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617810, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.481995] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7e8416-db9a-4e1f-929c-8a2122481d67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.494122] env[70020]: INFO nova.scheduler.client.report [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Deleted allocations for instance 372e5569-8824-4841-b3d6-4b07423c7b3d [ 655.565040] env[70020]: DEBUG nova.network.neutron [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Updated VIF entry in instance network info cache for port 75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 655.565379] env[70020]: DEBUG nova.network.neutron [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Updating instance_info_cache with network_info: [{"id": "75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9", "address": "fa:16:3e:ce:42:78", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75bc1ac5-ec", "ovs_interfaceid": "75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.601681] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617808, 'name': CreateVM_Task, 'duration_secs': 0.362979} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.601681] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 655.601681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.601681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.601681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 655.601942] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0af7283-ec07-4fda-abbc-4294fa5972ee {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.607627] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 655.607627] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522d8f74-c196-9266-fdbb-a9e624faf1c0" [ 655.607627] env[70020]: _type = "Task" [ 655.607627] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.617162] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d8f74-c196-9266-fdbb-a9e624faf1c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.619200] env[70020]: DEBUG nova.network.neutron [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Successfully created port: 31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 655.637888] env[70020]: DEBUG nova.compute.manager [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Received event network-changed-c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.638256] env[70020]: DEBUG nova.compute.manager [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Refreshing instance network info cache due to event network-changed-c5dfdf74-4ed7-460a-b458-cb45cbc910c2. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 655.638529] env[70020]: DEBUG oslo_concurrency.lockutils [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] Acquiring lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.638642] env[70020]: DEBUG oslo_concurrency.lockutils [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] Acquired lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.638989] env[70020]: DEBUG nova.network.neutron [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Refreshing network info cache for port c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.800695] env[70020]: DEBUG oslo_vmware.api [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617803, 'name': PowerOnVM_Task, 'duration_secs': 1.542951} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.801408] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 655.804520] env[70020]: INFO nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Took 13.37 seconds to spawn the instance on the hypervisor. [ 655.805545] env[70020]: DEBUG nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 655.805787] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4940324-8bad-42b5-8d21-7016cf851da1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.895268] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617804, 'name': CreateSnapshot_Task, 'duration_secs': 1.17116} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.896451] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 655.896451] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4033cfcb-c1dd-4dc5-b53b-037d2097de14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.968764] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081473} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.968764] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.969538] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fd5edd-71a9-46d7-bd80-783cb356e6c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.993777] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456/d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.994264] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0efd3acf-18f7-4881-8301-56271bfed10a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.015020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29be6e3d-93fd-4418-afca-2b1b9e68ad85 tempest-InstanceActionsNegativeTestJSON-1332608593 tempest-InstanceActionsNegativeTestJSON-1332608593-project-member] Lock "372e5569-8824-4841-b3d6-4b07423c7b3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.752s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.016673] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 656.016673] env[70020]: value = "task-3617811" [ 656.016673] env[70020]: _type = "Task" [ 656.016673] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.027328] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617811, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.068391] env[70020]: DEBUG oslo_concurrency.lockutils [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] Releasing lock "refresh_cache-d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.068880] env[70020]: DEBUG nova.compute.manager [req-fd5a68c2-9339-42ef-87a1-c2bd1c371567 req-10b1bcca-c6ec-4028-8029-00e7e0a01b3c service nova] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Received event network-vif-deleted-9ead2454-433c-40e4-962b-8e43443376bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 656.118401] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d8f74-c196-9266-fdbb-a9e624faf1c0, 'name': SearchDatastore_Task, 'duration_secs': 0.014065} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.118688] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.118908] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.119172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.119312] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.119650] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.119751] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0535221c-e9b8-4ca4-860e-00edbf3df4d6 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.130314] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.130955] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.131248] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b739707-16fa-4ac5-bd7c-cb1fe0ffd2da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.141283] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 656.141283] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5282107e-51a2-8ceb-3fa3-972508937b1c" [ 656.141283] env[70020]: _type = "Task" [ 656.141283] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.154564] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5282107e-51a2-8ceb-3fa3-972508937b1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.338621] env[70020]: INFO nova.compute.manager [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Took 31.62 seconds to build instance. [ 656.417140] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 656.419918] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-259049c6-e2ef-475b-befa-4d8b2b375451 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.435869] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 656.435869] env[70020]: value = "task-3617812" [ 656.435869] env[70020]: _type = "Task" [ 656.435869] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.449986] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617812, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.531430] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617811, 'name': ReconfigVM_Task, 'duration_secs': 0.316461} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.531430] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Reconfigured VM instance instance-00000012 to attach disk [datastore2] d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456/d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.534776] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4311ebef-72b8-4379-8228-49094dce1bc7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.540996] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 656.540996] env[70020]: value = "task-3617813" [ 656.540996] env[70020]: _type = "Task" [ 656.540996] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.553605] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617813, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.654941] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5282107e-51a2-8ceb-3fa3-972508937b1c, 'name': SearchDatastore_Task, 'duration_secs': 0.022365} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.656081] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10c0bd66-8a61-4d62-bf08-0ab520d0e9a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.670021] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 656.670021] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5228c8be-18ce-56b8-dd63-087fa514ebde" [ 656.670021] env[70020]: _type = "Task" [ 656.670021] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.680016] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5228c8be-18ce-56b8-dd63-087fa514ebde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.843099] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb514d95-16d3-46c1-a4f8-34aea5fb138d tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.133s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.947911] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617812, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.952414] env[70020]: DEBUG nova.network.neutron [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updated VIF entry in instance network info cache for port c5dfdf74-4ed7-460a-b458-cb45cbc910c2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 656.952746] env[70020]: DEBUG nova.network.neutron [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updating instance_info_cache with network_info: [{"id": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "address": "fa:16:3e:40:87:06", "network": {"id": "0676912c-462c-45af-8413-43c139247139", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-574076578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "852583dc12774b19bffbb2b0791e8336", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfdf74-4e", "ovs_interfaceid": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.059729] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617813, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.068033] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd512613-1783-416b-af60-a9fdcb496bb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.078333] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601ad23d-b456-4146-85c6-631059f34228 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.115775] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6488be7-c419-4ae8-90c5-7342c5ddf3a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.124218] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e27548-b6fc-4c28-aed8-6fb159d631c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.140807] env[70020]: DEBUG nova.compute.provider_tree [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.179189] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5228c8be-18ce-56b8-dd63-087fa514ebde, 'name': SearchDatastore_Task, 'duration_secs': 0.015373} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.179662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.179768] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 3501a6fc-f090-4098-8f63-57a97bd61f1b/3501a6fc-f090-4098-8f63-57a97bd61f1b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.180908] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6b3942b-9630-4c73-b9dc-4fe2b2dba213 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.188554] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 657.188554] env[70020]: value = "task-3617814" [ 657.188554] env[70020]: _type = "Task" [ 657.188554] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.196410] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.345617] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 657.451614] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617812, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.459791] env[70020]: DEBUG oslo_concurrency.lockutils [req-7e94ab44-b05a-4881-a52c-ddced64c3155 req-b3c0fd4b-ebd5-4cfd-b9f9-9c66240c3e62 service nova] Releasing lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.563882] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617813, 'name': Rename_Task, 'duration_secs': 0.875467} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.563882] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.563882] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f746e78-e50b-4837-9b87-070cac6536c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.571097] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 657.571097] env[70020]: value = "task-3617816" [ 657.571097] env[70020]: _type = "Task" [ 657.571097] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.586641] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.644125] env[70020]: DEBUG nova.scheduler.client.report [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.702611] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617814, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.772890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.772890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.885855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.952186] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617812, 'name': CloneVM_Task} progress is 95%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.086987] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617816, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.154149] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.154796] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.161238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.206s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.161515] env[70020]: DEBUG nova.objects.instance [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lazy-loading 'resources' on Instance uuid a0b4a0b0-748d-46eb-9e39-3f21e394c090 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 658.200672] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617814, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62409} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.201154] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 3501a6fc-f090-4098-8f63-57a97bd61f1b/3501a6fc-f090-4098-8f63-57a97bd61f1b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 658.201532] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 658.201909] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c748c3e-dd6e-4c67-9ce0-76af42791461 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.209291] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 658.209291] env[70020]: value = "task-3617817" [ 658.209291] env[70020]: _type = "Task" [ 658.209291] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.219968] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617817, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.279666] env[70020]: DEBUG nova.compute.utils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.458670] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617812, 'name': CloneVM_Task, 'duration_secs': 1.802555} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.459607] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Created linked-clone VM from snapshot [ 658.459946] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69d27d1-2649-49a5-9a70-5bdae15d256a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.474117] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Uploading image ef5754c9-6b33-4609-bfbc-a01eecd8d813 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 658.514018] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 658.514018] env[70020]: value = "vm-721585" [ 658.514018] env[70020]: _type = "VirtualMachine" [ 658.514018] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 658.514018] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b01ec8e9-01bb-472d-9536-4fcb722df77f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.526043] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lease: (returnval){ [ 658.526043] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e05d0b-a47a-6e42-4c4f-b3d5defec450" [ 658.526043] env[70020]: _type = "HttpNfcLease" [ 658.526043] env[70020]: } obtained for exporting VM: (result){ [ 658.526043] env[70020]: value = "vm-721585" [ 658.526043] env[70020]: _type = "VirtualMachine" [ 658.526043] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 658.526600] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the lease: (returnval){ [ 658.526600] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e05d0b-a47a-6e42-4c4f-b3d5defec450" [ 658.526600] env[70020]: _type = "HttpNfcLease" [ 658.526600] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 658.529202] env[70020]: DEBUG nova.network.neutron [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Successfully updated port: 31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.535974] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 658.535974] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e05d0b-a47a-6e42-4c4f-b3d5defec450" [ 658.535974] env[70020]: _type = "HttpNfcLease" [ 658.535974] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 658.584818] env[70020]: DEBUG oslo_vmware.api [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617816, 'name': PowerOnVM_Task, 'duration_secs': 0.723739} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.585135] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.585362] env[70020]: INFO nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Took 11.52 seconds to spawn the instance on the hypervisor. 
[ 658.585568] env[70020]: DEBUG nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.586449] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c7fe9f-e6ac-4293-bf78-0dad8147df2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.664279] env[70020]: DEBUG nova.compute.utils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.665831] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.665934] env[70020]: DEBUG nova.network.neutron [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.720387] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617817, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100178} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.723922] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.723922] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d79db1c-2642-4a7c-abb5-165204dcf60d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.749986] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 3501a6fc-f090-4098-8f63-57a97bd61f1b/3501a6fc-f090-4098-8f63-57a97bd61f1b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.753959] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4223c682-cb22-49c8-8ebe-1b702f727015 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.771360] env[70020]: DEBUG nova.policy [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb7af06db94f4e57b35617f2a2aaafa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30d21f5de57c422db3b718ab4c760ac3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.778243] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 658.778243] env[70020]: value = "task-3617819" [ 658.778243] env[70020]: _type = "Task" [ 658.778243] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.786112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.791938] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617819, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.034311] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.034421] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.034544] env[70020]: DEBUG nova.network.neutron [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.041508] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 659.041508] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e05d0b-a47a-6e42-4c4f-b3d5defec450" [ 659.041508] env[70020]: _type = "HttpNfcLease" [ 659.041508] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 659.042438] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 659.042438] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e05d0b-a47a-6e42-4c4f-b3d5defec450" [ 659.042438] env[70020]: _type = "HttpNfcLease" [ 659.042438] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 659.042881] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835725a4-ea6f-4081-9671-3121116f0bb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.062037] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c8dcfb-90b9-4089-17f9-3288b6cdfe64/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 659.062037] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c8dcfb-90b9-4089-17f9-3288b6cdfe64/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 659.137582] env[70020]: INFO nova.compute.manager [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Took 29.56 seconds to build instance. [ 659.174244] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.212861] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fa878eea-6c65-4809-aaa4-512aa07aacb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.290575] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.305299] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa45352a-a9f7-4643-86b3-b54a736b4654 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.313546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615cc439-ad02-4691-a251-1ac3c14807c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.349410] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8886262-b1b6-4cab-8c89-940e7b97c91d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.357646] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048d3126-5c7c-4526-a245-40946f69a6e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.378346] env[70020]: DEBUG nova.compute.provider_tree [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.626656] env[70020]: DEBUG nova.network.neutron [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.639337] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29035b6a-6f4f-49d6-94fe-580c50c72742 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.026s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.753243] env[70020]: DEBUG nova.network.neutron [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Successfully created port: 09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.793745] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617819, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.805371] env[70020]: DEBUG nova.compute.manager [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Received event network-changed-7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.805869] env[70020]: DEBUG nova.compute.manager [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Refreshing instance network info cache due to event network-changed-7cbd6812-9369-466e-a269-def6f4b8ed8f. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 659.806222] env[70020]: DEBUG oslo_concurrency.lockutils [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] Acquiring lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.806492] env[70020]: DEBUG oslo_concurrency.lockutils [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] Acquired lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.806776] env[70020]: DEBUG nova.network.neutron [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Refreshing network info cache for port 7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.886179] env[70020]: DEBUG nova.scheduler.client.report [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.964588] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.964858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.966133] env[70020]: INFO nova.compute.manager [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Attaching volume e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd to /dev/sdb [ 660.027182] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7764d0d4-2e6c-468c-add5-8a4367f552b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.037244] env[70020]: DEBUG nova.compute.manager [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] 
[instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 660.041375] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7405b6ce-e588-4b6b-8e94-cd7d67bf9cdf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.057549] env[70020]: DEBUG nova.virt.block_device [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updating existing volume attachment record: 9a57e4b5-8ccd-46e3-9b42-5809a8ba5c0f {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 660.110051] env[70020]: DEBUG nova.network.neutron [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updating instance_info_cache with network_info: [{"id": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "address": "fa:16:3e:33:32:4f", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31ef45d2-b5", "ovs_interfaceid": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.143285] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.183248] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.218922] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.220460] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.220460] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.221128] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.221275] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.221627] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.221866] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.222108] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 660.222570] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.224200] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.224200] env[70020]: DEBUG nova.virt.hardware [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.227390] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de0f487-fb9c-42e6-b794-354b54d951ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.241420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b69bfbe-e858-417f-b8b9-31ed4c4231ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.290954] env[70020]: DEBUG nova.compute.manager [req-432b456b-4843-49ab-a9a0-9e38d47abe64 req-e3deb96a-c25b-406e-af12-2fd27f7cd48a service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Received event network-vif-plugged-31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 660.290954] env[70020]: DEBUG oslo_concurrency.lockutils [req-432b456b-4843-49ab-a9a0-9e38d47abe64 req-e3deb96a-c25b-406e-af12-2fd27f7cd48a service nova] Acquiring lock "301b30f6-9909-4fc9-8721-88a314e4edb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.290954] env[70020]: DEBUG oslo_concurrency.lockutils [req-432b456b-4843-49ab-a9a0-9e38d47abe64 req-e3deb96a-c25b-406e-af12-2fd27f7cd48a service nova] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.290954] env[70020]: DEBUG oslo_concurrency.lockutils [req-432b456b-4843-49ab-a9a0-9e38d47abe64 req-e3deb96a-c25b-406e-af12-2fd27f7cd48a service nova] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.290954] env[70020]: DEBUG nova.compute.manager [req-432b456b-4843-49ab-a9a0-9e38d47abe64 req-e3deb96a-c25b-406e-af12-2fd27f7cd48a service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] No waiting events found dispatching network-vif-plugged-31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 660.291268] env[70020]: WARNING nova.compute.manager [req-432b456b-4843-49ab-a9a0-9e38d47abe64 req-e3deb96a-c25b-406e-af12-2fd27f7cd48a service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Received unexpected event network-vif-plugged-31ef45d2-b59a-4c2c-9fdc-f17ae158e442 for instance with vm_state building and task_state spawning. [ 660.302148] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617819, 'name': ReconfigVM_Task, 'duration_secs': 1.063358} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.302455] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 3501a6fc-f090-4098-8f63-57a97bd61f1b/3501a6fc-f090-4098-8f63-57a97bd61f1b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.303229] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a38306b-d6b2-4a33-8853-0f2f81b078ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.312360] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 660.312360] env[70020]: value = "task-3617825" [ 660.312360] env[70020]: _type = "Task" [ 660.312360] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.321416] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617825, 'name': Rename_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.396152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.398851] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.764s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.401114] env[70020]: INFO nova.compute.claims [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.438032] env[70020]: INFO nova.scheduler.client.report [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Deleted allocations for instance a0b4a0b0-748d-46eb-9e39-3f21e394c090 [ 660.569949] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.615382] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Releasing lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.615804] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Instance network_info: |[{"id": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "address": "fa:16:3e:33:32:4f", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31ef45d2-b5", "ovs_interfaceid": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 660.616356] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:32:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31ef45d2-b59a-4c2c-9fdc-f17ae158e442', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.625940] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Creating folder: Project (90641c26c4064f219bf2e52694da4e0d). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.626306] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a98318d-2f15-4e28-98b1-5d26ed2c7892 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.637871] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Created folder: Project (90641c26c4064f219bf2e52694da4e0d) in parent group-v721521. [ 660.638160] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Creating folder: Instances. Parent ref: group-v721592. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.638324] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e37e7afa-256e-4005-a4a6-1bf98dd6670e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.649321] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Created folder: Instances in parent group-v721592. [ 660.649618] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.649833] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 660.652819] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7ecef95-775a-4ee2-82e7-f22c15afff0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.674635] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.674635] env[70020]: value = "task-3617830" [ 660.674635] env[70020]: _type = "Task" [ 660.674635] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.683067] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617830, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.689944] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.825987] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617825, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.950249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-93a43920-00ea-49d0-9ee8-939bbe650a83 tempest-ServerAddressesNegativeTestJSON-1327869760 tempest-ServerAddressesNegativeTestJSON-1327869760-project-member] Lock "a0b4a0b0-748d-46eb-9e39-3f21e394c090" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.369s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.189350] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617830, 'name': CreateVM_Task, 'duration_secs': 0.458547} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.190223] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.192176] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.192176] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.192176] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.192176] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e0640d7-a6b0-4dd7-92b1-6798e3ea7009 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.197984] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 661.197984] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dca557-86a9-4ad6-661f-aa5ce91407a1" [ 661.197984] env[70020]: _type = "Task" [ 661.197984] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.212342] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dca557-86a9-4ad6-661f-aa5ce91407a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.326195] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617825, 'name': Rename_Task, 'duration_secs': 0.656149} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.326195] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.326195] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0c18fab-8a13-4b58-b496-4045de473207 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.332697] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 661.332697] env[70020]: value = "task-3617831" [ 661.332697] env[70020]: _type = "Task" [ 661.332697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.341422] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617831, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.344078] env[70020]: DEBUG nova.network.neutron [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updated VIF entry in instance network info cache for port 7cbd6812-9369-466e-a269-def6f4b8ed8f. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 661.344078] env[70020]: DEBUG nova.network.neutron [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [{"id": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "address": "fa:16:3e:99:62:fe", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbd6812-93", "ovs_interfaceid": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.713527] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dca557-86a9-4ad6-661f-aa5ce91407a1, 'name': SearchDatastore_Task, 'duration_secs': 0.027397} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.716742] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.716959] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.717534] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.717669] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.717864] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.718877] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9599def-57d6-4286-8abe-325ca39921ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.732019] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.732116] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 661.735787] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00e53e90-fe9c-4eed-ac5d-f963e3764b6a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.741867] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 661.741867] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5200b1af-462f-32a1-8a24-57f3bab1b688" [ 661.741867] env[70020]: _type = "Task" [ 661.741867] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.751857] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5200b1af-462f-32a1-8a24-57f3bab1b688, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.833722] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2872c34a-37b5-407e-86f9-5eec930e97c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.846853] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01ad426-2871-4876-8e6a-45d5559edba3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.850446] env[70020]: DEBUG oslo_concurrency.lockutils [req-e95bcce5-53db-4fe0-92f6-e7a72541f08e req-51e45903-031e-48ae-af0a-501d82eb06d1 service nova] Releasing lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.850919] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617831, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.881940] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f268ab-7d17-49e9-977c-1e5abad16f3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.896020] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8df319-8c0c-4951-9995-4e84ab91035b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.912539] env[70020]: DEBUG nova.compute.provider_tree [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.257308] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5200b1af-462f-32a1-8a24-57f3bab1b688, 'name': SearchDatastore_Task, 'duration_secs': 0.016689} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.258424] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83fbe2d6-c6ed-4b17-ae67-0175c7e8ce70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.266418] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 662.266418] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5262f656-5dc2-3f38-3cc9-039fb2b8a498" [ 662.266418] env[70020]: _type = "Task" [ 662.266418] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.277441] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5262f656-5dc2-3f38-3cc9-039fb2b8a498, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.351455] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617831, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.395156] env[70020]: DEBUG nova.network.neutron [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Successfully updated port: 09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.442835] env[70020]: ERROR nova.scheduler.client.report [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [req-60b11884-27aa-4f8b-b30c-3963815a1a46] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-60b11884-27aa-4f8b-b30c-3963815a1a46"}]} [ 662.473885] env[70020]: DEBUG nova.scheduler.client.report [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 662.497222] env[70020]: DEBUG nova.scheduler.client.report [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 662.497222] env[70020]: DEBUG nova.compute.provider_tree [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.516416] env[70020]: DEBUG nova.scheduler.client.report [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 
tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 662.552307] env[70020]: DEBUG nova.scheduler.client.report [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 662.778121] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5262f656-5dc2-3f38-3cc9-039fb2b8a498, 'name': SearchDatastore_Task, 'duration_secs': 0.019818} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.778495] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.778535] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 301b30f6-9909-4fc9-8721-88a314e4edb4/301b30f6-9909-4fc9-8721-88a314e4edb4.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 662.778830] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05288a98-3da1-47c0-b379-0c15acd16c2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.791081] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 662.791081] env[70020]: value = "task-3617835" [ 662.791081] env[70020]: _type = "Task" [ 662.791081] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.801144] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617835, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.848959] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617831, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.905800] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "refresh_cache-0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.905800] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquired lock "refresh_cache-0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.905937] env[70020]: DEBUG nova.network.neutron [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.014287] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1d7890-9b7f-4b7d-aa33-a470681cb580 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.023699] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63454ef-071a-4c1d-90f7-d70dcdfd2e6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.061260] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c852770-7c13-456f-a514-2e11219a9cc7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.071193] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3704467-0c16-4f76-aa9f-5f849cbaa7eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.087045] env[70020]: DEBUG nova.compute.provider_tree [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 663.301448] env[70020]: DEBUG oslo_vmware.api [None 
req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.348992] env[70020]: DEBUG oslo_vmware.api [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617831, 'name': PowerOnVM_Task, 'duration_secs': 1.712041} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.349274] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.353020] env[70020]: INFO nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Took 13.64 seconds to spawn the instance on the hypervisor. [ 663.353020] env[70020]: DEBUG nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.353020] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb47df0-2944-4b2f-9a9f-176be4247937 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.468421] env[70020]: DEBUG nova.network.neutron [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.607324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.607324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.607324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.607324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.608763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.612123] env[70020]: INFO nova.compute.manager [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Terminating instance [ 663.652138] env[70020]: DEBUG nova.scheduler.client.report [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 38 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 663.652138] env[70020]: DEBUG nova.compute.provider_tree [None 
req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 38 to 39 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 663.652138] env[70020]: DEBUG nova.compute.provider_tree [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 663.658016] env[70020]: DEBUG nova.compute.manager [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Received event network-changed-31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 663.658016] env[70020]: DEBUG nova.compute.manager [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Refreshing instance network info cache due to event network-changed-31ef45d2-b59a-4c2c-9fdc-f17ae158e442. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 663.658016] env[70020]: DEBUG oslo_concurrency.lockutils [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] Acquiring lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.658016] env[70020]: DEBUG oslo_concurrency.lockutils [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] Acquired lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.658016] env[70020]: DEBUG nova.network.neutron [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Refreshing network info cache for port 31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.769897] env[70020]: DEBUG nova.network.neutron [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Updating instance_info_cache with network_info: [{"id": "09f3d7f9-1529-498f-b393-01af888741b2", "address": "fa:16:3e:26:65:15", "network": {"id": "4b0ca2e5-2e95-4f15-9490-880c213fc0fe", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1866535662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "30d21f5de57c422db3b718ab4c760ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f3d7f9-15", "ovs_interfaceid": "09f3d7f9-1529-498f-b393-01af888741b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.810870] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617835, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.873656] env[70020]: INFO nova.compute.manager [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Took 33.30 seconds to build instance. 
[ 664.119106] env[70020]: DEBUG nova.compute.manager [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 664.119106] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.122694] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08566d5f-5a25-4348-a280-411327022fb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.131743] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 664.131743] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-087dbe4b-ceef-4304-a437-377508071edc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.139655] env[70020]: DEBUG oslo_vmware.api [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 664.139655] env[70020]: value = "task-3617837" [ 664.139655] env[70020]: _type = "Task" [ 664.139655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.160858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.762s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.161426] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 664.166019] env[70020]: DEBUG oslo_vmware.api [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617837, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.167939] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.412s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.168865] env[70020]: INFO nova.compute.claims [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.273283] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Releasing lock "refresh_cache-0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.275171] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Instance network_info: |[{"id": "09f3d7f9-1529-498f-b393-01af888741b2", "address": "fa:16:3e:26:65:15", "network": {"id": "4b0ca2e5-2e95-4f15-9490-880c213fc0fe", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1866535662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "30d21f5de57c422db3b718ab4c760ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f3d7f9-15", "ovs_interfaceid": "09f3d7f9-1529-498f-b393-01af888741b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 664.275440] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:65:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09f3d7f9-1529-498f-b393-01af888741b2', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.282047] env[70020]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Creating folder: Project (30d21f5de57c422db3b718ab4c760ac3). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.282425] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c32484d-fd7a-4b4f-aaf7-9bb436da5833 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.294013] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Created folder: Project (30d21f5de57c422db3b718ab4c760ac3) in parent group-v721521. [ 664.294225] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Creating folder: Instances. Parent ref: group-v721595. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.298672] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9b5f151-fc73-4464-b7bb-894b067d61a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.305682] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617835, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.308673} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.307152] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 301b30f6-9909-4fc9-8721-88a314e4edb4/301b30f6-9909-4fc9-8721-88a314e4edb4.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.308618] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.308618] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Created folder: Instances in parent group-v721595. [ 664.308618] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.308618] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a24f179-6853-4a8d-9573-46f1324440e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.310406] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.312679] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-188e5577-3d0a-44a9-9f4c-0520b879231d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.337915] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 664.337915] env[70020]: value = "task-3617840" [ 664.337915] env[70020]: _type = "Task" [ 664.337915] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.338893] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.338893] env[70020]: value = "task-3617841" [ 664.338893] env[70020]: _type = "Task" [ 664.338893] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.358711] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.364029] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617841, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.376436] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d63ea3e-1954-42ac-b871-be3f6be178cd tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.207s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.546710] env[70020]: DEBUG nova.network.neutron [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updated VIF entry in instance network info cache for port 31ef45d2-b59a-4c2c-9fdc-f17ae158e442. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.546710] env[70020]: DEBUG nova.network.neutron [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updating instance_info_cache with network_info: [{"id": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "address": "fa:16:3e:33:32:4f", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31ef45d2-b5", "ovs_interfaceid": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.651285] env[70020]: DEBUG oslo_vmware.api [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617837, 'name': PowerOffVM_Task, 'duration_secs': 0.302195} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.651775] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 664.652029] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 664.652344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-555649b3-99b3-4313-9676-4cf28fc90ca2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.678031] env[70020]: DEBUG nova.compute.utils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 664.686147] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 664.686348] env[70020]: DEBUG nova.network.neutron [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 664.726385] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 664.726700] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 664.726797] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleting the datastore file [datastore2] d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 664.727042] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6be60537-88a8-47a9-8db5-649c052424fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
664.736406] env[70020]: DEBUG oslo_vmware.api [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 664.736406] env[70020]: value = "task-3617844" [ 664.736406] env[70020]: _type = "Task" [ 664.736406] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.746748] env[70020]: DEBUG oslo_vmware.api [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.748796] env[70020]: DEBUG nova.policy [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b511ec320fdc4dacab9e6f66a50f625c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'affdcbe1612b434697a53a8692ef77a4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 664.849104] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123605} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.853990] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.855674] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdac013-10ec-4304-a739-bb960f8dbd7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.892270] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 301b30f6-9909-4fc9-8721-88a314e4edb4/301b30f6-9909-4fc9-8721-88a314e4edb4.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.892758] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.896163] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617841, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.898023] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52a5aecd-f83e-4b70-a5e3-285ad8b5c771 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.921173] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 664.921173] env[70020]: value = "task-3617845" [ 664.921173] env[70020]: _type = "Task" [ 664.921173] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.937246] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617845, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.052252] env[70020]: DEBUG oslo_concurrency.lockutils [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] Releasing lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.053454] env[70020]: DEBUG nova.compute.manager [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Received event network-vif-plugged-09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.054120] env[70020]: DEBUG oslo_concurrency.lockutils [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] Acquiring lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.055131] env[70020]: DEBUG oslo_concurrency.lockutils [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.056146] env[70020]: DEBUG oslo_concurrency.lockutils [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.056432] env[70020]: DEBUG nova.compute.manager [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] No 
waiting events found dispatching network-vif-plugged-09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.058358] env[70020]: WARNING nova.compute.manager [req-19a202de-5b92-4fe7-a790-6051fd587dc7 req-23e9d408-cfb9-46ef-a94e-41bdb7ed56b8 service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Received unexpected event network-vif-plugged-09f3d7f9-1529-498f-b393-01af888741b2 for instance with vm_state building and task_state spawning. [ 665.149816] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Volume attach. Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 665.150447] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721591', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'name': 'volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1f95bfa8-bc97-4ed7-8c33-c00297430bf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'serial': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 665.151567] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65086d01-e115-4710-a37f-0e58f1a0d2b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.174183] env[70020]: DEBUG nova.network.neutron [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Successfully created port: bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.178180] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca34bc8d-4ffa-4257-a149-df70c190b5e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.199142] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 665.213705] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd/volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.215170] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efe6e0c3-6929-470d-8cc1-ab67cd18b99a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.239615] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 665.239615] env[70020]: value = "task-3617846" [ 665.239615] env[70020]: _type = "Task" [ 665.239615] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.251478] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "a09db142-60d1-4a62-8e76-1e2e3676124f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.251741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "a09db142-60d1-4a62-8e76-1e2e3676124f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.262965] env[70020]: DEBUG oslo_vmware.api [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3617844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.479808} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.266259] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 665.266434] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 665.266508] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 665.266676] env[70020]: INFO nova.compute.manager [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Took 1.15 seconds to destroy the instance on the hypervisor. [ 665.266916] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 665.267147] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617846, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.267641] env[70020]: DEBUG nova.compute.manager [-] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 665.267749] env[70020]: DEBUG nova.network.neutron [-] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 665.360950] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617841, 'name': CreateVM_Task, 'duration_secs': 0.542476} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.360950] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.361438] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.361643] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.361948] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.362262] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86147c89-d924-4f61-ad29-45475ae0359f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.369617] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 665.369617] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5277ca28-2eaa-ab83-5e65-a278de20e72d" [ 665.369617] env[70020]: _type = "Task" [ 665.369617] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.380951] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5277ca28-2eaa-ab83-5e65-a278de20e72d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.418834] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.433102] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617845, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.496291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "19036f6f-2ee3-4ea5-82fa-b510bf903922" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.496291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.754479] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617846, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.765759] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f03a63-67ba-4ddd-b336-f17d00919780 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.793895] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dee3b99-0bcc-42ef-99e6-e7f197562633 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.838979] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0b1bad-08fe-416c-a54f-3c3f2206f227 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.851538] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e32f4c8-2a73-42e8-82c8-8c9025d6fe67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.879014] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 665.896237] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': 
session[528c1535-3daa-a7b0-823d-982a96a72224]5277ca28-2eaa-ab83-5e65-a278de20e72d, 'name': SearchDatastore_Task, 'duration_secs': 0.018874} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.896796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.897168] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.897810] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.897896] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.898153] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.898926] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8b9c1a4-e252-457e-b943-e6a9a7bb7506 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.909598] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.909969] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.911204] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abb432dc-dd90-446f-a2cb-22466fabe892 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.919818] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 665.919818] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cec22d-bc7d-238f-7e84-7a247727b157" [ 665.919818] env[70020]: _type = "Task" [ 665.919818] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.936016] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cec22d-bc7d-238f-7e84-7a247727b157, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.941452] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617845, 'name': ReconfigVM_Task, 'duration_secs': 0.898483} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.941937] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 301b30f6-9909-4fc9-8721-88a314e4edb4/301b30f6-9909-4fc9-8721-88a314e4edb4.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.942930] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-355fc757-f48d-4672-85c9-a61e193675a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.951386] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 665.951386] env[70020]: value = "task-3617848" [ 665.951386] env[70020]: _type = "Task" [ 665.951386] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.963579] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617848, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.225539] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 666.254451] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617846, 'name': ReconfigVM_Task, 'duration_secs': 0.551341} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.254788] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Reconfigured VM instance instance-00000006 to attach disk [datastore1] volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd/volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.262331] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9610ae07-88bd-41ad-8bb8-092b95b2e319 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.280623] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 666.280623] env[70020]: value = "task-3617849" [ 666.280623] env[70020]: _type = "Task" [ 666.280623] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.283807] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.284122] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.285248] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.287743] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.287743] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.287743] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.287743] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.287743] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 666.287985] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.287985] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.287985] env[70020]: DEBUG nova.virt.hardware [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.288706] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d362ca-eca1-4787-a8ee-d84539069e61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.306912] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11895465-b6fd-42b3-abc6-3711fd0acb90 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.312100] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617849, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.404820] env[70020]: ERROR nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [req-e1a49854-33a0-4e1a-a7ad-841f87282288] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e1a49854-33a0-4e1a-a7ad-841f87282288"}]} [ 666.426630] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 666.432462] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cec22d-bc7d-238f-7e84-7a247727b157, 'name': SearchDatastore_Task, 'duration_secs': 0.025647} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.433254] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-649d5e49-b456-4212-856e-795daea2e7d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.438749] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 666.438749] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bc1df5-7200-f648-61e9-ecf1c9de335c" [ 666.438749] env[70020]: _type = "Task" [ 666.438749] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.442599] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 666.442808] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.454020] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bc1df5-7200-f648-61e9-ecf1c9de335c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.461799] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617848, 'name': Rename_Task, 'duration_secs': 0.183311} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.462892] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 666.465868] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.465868] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b002c8c8-60f1-4114-89f8-741b3c641e2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.471368] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 666.471368] env[70020]: value = "task-3617850" [ 666.471368] env[70020]: _type = "Task" [ 666.471368] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.480980] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.483745] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 666.570899] env[70020]: DEBUG nova.network.neutron [-] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.799505] env[70020]: DEBUG oslo_vmware.api [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617849, 'name': ReconfigVM_Task, 'duration_secs': 0.153506} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.799819] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721591', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'name': 'volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1f95bfa8-bc97-4ed7-8c33-c00297430bf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'serial': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 666.956061] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bc1df5-7200-f648-61e9-ecf1c9de335c, 'name': SearchDatastore_Task, 'duration_secs': 0.010637} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.956362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.956454] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468/0f89d49e-d26c-4d5d-90d7-6f0bf3d67468.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.957161] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-204bb559-1bbd-4700-88e8-ffff7f10efa1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.970621] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 666.970621] env[70020]: value = "task-3617852" [ 666.970621] env[70020]: _type = "Task" [ 666.970621] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.979121] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d4ea8a-226d-462c-94be-e72251f311b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.985350] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617852, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.990995] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617850, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.994069] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77d65fa-28aa-44d4-b406-2a93051201b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.030949] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aeb4fa3-8ea7-432a-bd21-3c2ae383c1c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.039824] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7afbbf-1181-4e59-914a-678fb3d0aa0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.054701] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 667.073772] env[70020]: INFO nova.compute.manager [-] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Took 1.81 seconds to deallocate network for instance. [ 667.123854] env[70020]: DEBUG nova.compute.manager [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Received event network-changed-09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.124102] env[70020]: DEBUG nova.compute.manager [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Refreshing instance network info cache due to event network-changed-09f3d7f9-1529-498f-b393-01af888741b2. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 667.124277] env[70020]: DEBUG oslo_concurrency.lockutils [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] Acquiring lock "refresh_cache-0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.124416] env[70020]: DEBUG oslo_concurrency.lockutils [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] Acquired lock "refresh_cache-0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.124557] env[70020]: DEBUG nova.network.neutron [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Refreshing network info cache for port 09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.259322] env[70020]: DEBUG nova.network.neutron [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Successfully updated port: bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.270422] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.270890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.364515] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.364918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.365222] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 
tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.365476] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.365742] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.368544] env[70020]: INFO nova.compute.manager [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Terminating instance [ 667.417478] env[70020]: DEBUG nova.compute.manager [req-47e3f471-d6e2-4446-8510-b9a056d5f272 req-fe065d12-e91f-4315-9c0f-e1be4b8619a0 service nova] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Received event network-vif-deleted-75bc1ac5-ec35-4914-abff-0fc9c5eb7bf9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.490491] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617852, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.494633] env[70020]: DEBUG oslo_vmware.api [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3617850, 'name': PowerOnVM_Task, 'duration_secs': 0.659268} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.494983] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.495224] env[70020]: INFO nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Took 12.07 seconds to spawn the instance on the hypervisor. 
[ 667.495408] env[70020]: DEBUG nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.496256] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070c17c8-3a05-44ec-a038-1dc185d05313 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.581092] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.581602] env[70020]: ERROR nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [req-59205b82-13fe-46bb-ba7e-dde41849afa3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-59205b82-13fe-46bb-ba7e-dde41849afa3"}]} [ 667.601274] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 667.623953] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 667.624159] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 667.648135] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 667.679990] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 667.765252] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "refresh_cache-832a38c8-ed3a-460b-91bd-0138d2f2d03d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.765499] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "refresh_cache-832a38c8-ed3a-460b-91bd-0138d2f2d03d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.765702] env[70020]: DEBUG nova.network.neutron [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.854250] env[70020]: DEBUG nova.objects.instance [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lazy-loading 'flavor' on Instance uuid 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 667.873948] env[70020]: DEBUG nova.compute.manager [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 667.874198] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.875256] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261bbb64-dfe7-4f51-9996-1e576ab00620 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.887779] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 667.887899] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7a7f131-82f7-4873-88b6-a6e818eabf1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.894664] env[70020]: DEBUG oslo_vmware.api [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 667.894664] env[70020]: value = "task-3617853" [ 667.894664] env[70020]: _type = "Task" [ 667.894664] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.904339] env[70020]: DEBUG oslo_vmware.api [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.987529] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567749} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.990674] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468/0f89d49e-d26c-4d5d-90d7-6f0bf3d67468.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 667.991054] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 667.995093] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-687f9017-3584-4d8a-b611-6be342e5d56b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.006711] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 668.006711] env[70020]: value = "task-3617854" [ 668.006711] env[70020]: _type = "Task" [ 668.006711] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.021804] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617854, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.025183] env[70020]: INFO nova.compute.manager [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Took 33.26 seconds to build instance. [ 668.085732] env[70020]: DEBUG nova.network.neutron [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Updated VIF entry in instance network info cache for port 09f3d7f9-1529-498f-b393-01af888741b2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.086156] env[70020]: DEBUG nova.network.neutron [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Updating instance_info_cache with network_info: [{"id": "09f3d7f9-1529-498f-b393-01af888741b2", "address": "fa:16:3e:26:65:15", "network": {"id": "4b0ca2e5-2e95-4f15-9490-880c213fc0fe", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1866535662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "30d21f5de57c422db3b718ab4c760ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f3d7f9-15", "ovs_interfaceid": "09f3d7f9-1529-498f-b393-01af888741b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.206261] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdf63d5-8345-48e2-911c-57753587399a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.215502] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee56f9b-5e14-4066-af55-4adf49f340e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.253105] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "79d98176-b566-4349-ad10-c2ea6fdbc657" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.253829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.253829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "79d98176-b566-4349-ad10-c2ea6fdbc657-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.253829] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.254357] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.260035] env[70020]: INFO nova.compute.manager [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Terminating instance [ 668.260524] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd6a002-4e72-4249-9cf9-11ef97422a4d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.270282] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d154b77c-524f-4b6a-9208-5e6098bac25e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.288248] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 668.323621] env[70020]: DEBUG nova.network.neutron [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.358936] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8ba5f8b3-e9ea-4f83-ae79-d43b6369eaeb tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.394s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.405340] env[70020]: DEBUG oslo_vmware.api [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617853, 'name': PowerOffVM_Task, 'duration_secs': 0.209517} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.406462] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.406462] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.406462] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0211b8eb-fc19-47a1-936d-4f55a2e25259 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.471954] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.471954] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.471954] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Deleting the datastore file [datastore1] 6a114dce-7ed3-46e1-9d50-c3dd6efd340c {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.471954] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d4f7402-458c-4b7a-8d3e-34e3688c4551 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.477578] env[70020]: DEBUG oslo_vmware.api [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for the task: (returnval){ [ 668.477578] env[70020]: 
value = "task-3617856" [ 668.477578] env[70020]: _type = "Task" [ 668.477578] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.488743] env[70020]: DEBUG oslo_vmware.api [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.520596] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617854, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180555} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.520907] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.521740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c053b62e-2821-4a3b-942d-ce1631864698 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.525395] env[70020]: DEBUG nova.network.neutron [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Updating instance_info_cache with network_info: [{"id": "bfb6aa7f-bef1-4a61-8430-16719d55f556", "address": "fa:16:3e:f3:07:11", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfb6aa7f-be", "ovs_interfaceid": "bfb6aa7f-bef1-4a61-8430-16719d55f556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.527358] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e8aa8689-d52b-4690-b2d2-c791159bf215 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.770s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.546645] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468/0f89d49e-d26c-4d5d-90d7-6f0bf3d67468.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.548242] env[70020]: DEBUG nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.550764] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5452ea81-3a6d-45ea-9fbf-ade3bf232f9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.570654] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 668.570654] env[70020]: value = "task-3617857" [ 668.570654] env[70020]: _type = "Task" [ 668.570654] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.582769] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617857, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.589672] env[70020]: DEBUG oslo_concurrency.lockutils [req-e0f3676a-f5c1-471b-817a-efcc724084ea req-644b633f-e1eb-498e-a597-a2f10eef4dbf service nova] Releasing lock "refresh_cache-0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.767947] env[70020]: DEBUG nova.compute.manager [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.771473] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.772470] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7310130e-57dc-4612-87e7-2b8a5cc1c5d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.782279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.782566] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81f0efee-6c97-4764-a1ae-17a6713dfa6b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.797034] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 668.797034] env[70020]: value = "task-3617858" [ 668.797034] env[70020]: _type = "Task" [ 668.797034] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.809015] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.851449] env[70020]: DEBUG nova.scheduler.client.report [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 668.851449] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 42 to 43 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 668.851449] env[70020]: DEBUG nova.compute.provider_tree [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 668.989703] env[70020]: DEBUG oslo_vmware.api [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Task: {'id': task-3617856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198135} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.991756] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.991756] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 668.991756] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 668.991756] env[70020]: INFO nova.compute.manager [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 668.991756] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.992156] env[70020]: DEBUG nova.compute.manager [-] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 668.992156] env[70020]: DEBUG nova.network.neutron [-] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.027966] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "refresh_cache-832a38c8-ed3a-460b-91bd-0138d2f2d03d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.028158] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Instance network_info: |[{"id": "bfb6aa7f-bef1-4a61-8430-16719d55f556", "address": "fa:16:3e:f3:07:11", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfb6aa7f-be", "ovs_interfaceid": "bfb6aa7f-bef1-4a61-8430-16719d55f556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 669.028672] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:07:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfb6aa7f-bef1-4a61-8430-16719d55f556', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 669.038230] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Creating folder: Project (affdcbe1612b434697a53a8692ef77a4). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.038569] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1b85ad7-8555-488a-a227-2d2832ab432d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.051890] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Created folder: Project (affdcbe1612b434697a53a8692ef77a4) in parent group-v721521. [ 669.051890] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Creating folder: Instances. Parent ref: group-v721599. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.051890] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e147de25-2536-4cb7-9ba3-44aafa5073c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.061425] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Created folder: Instances in parent group-v721599. [ 669.061981] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.061981] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 669.062319] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ef9cdda-48d1-49f7-b0be-79d2a30cdac9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.095736] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 669.095736] env[70020]: value = "task-3617862" [ 669.095736] env[70020]: _type = "Task" [ 669.095736] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.108908] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617857, 'name': ReconfigVM_Task, 'duration_secs': 0.29218} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.108908] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468/0f89d49e-d26c-4d5d-90d7-6f0bf3d67468.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.108908] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a54c703-4bd5-4660-9852-4c7fe2bb675d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.115740] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617862, 'name': CreateVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.124742] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 669.124742] env[70020]: value = "task-3617863" [ 669.124742] env[70020]: _type = "Task" [ 669.124742] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.125778] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.143314] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617863, 'name': Rename_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.144203] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c8dcfb-90b9-4089-17f9-3288b6cdfe64/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 669.145371] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c2351d-9b2e-4c67-aa5d-a063114538c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.159678] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c8dcfb-90b9-4089-17f9-3288b6cdfe64/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 669.161199] env[70020]: ERROR oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c8dcfb-90b9-4089-17f9-3288b6cdfe64/disk-0.vmdk due to incomplete transfer. [ 669.161199] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-47a76ffb-35e1-4f27-a726-3f4eb081f042 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.171283] env[70020]: DEBUG oslo_vmware.rw_handles [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c8dcfb-90b9-4089-17f9-3288b6cdfe64/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 669.171733] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Uploaded image ef5754c9-6b33-4609-bfbc-a01eecd8d813 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 669.174373] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 669.174373] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-de7015c5-23b8-410e-b98e-48ab3f229d31 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.183026] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 669.183026] env[70020]: value = "task-3617864" [ 669.183026] env[70020]: _type = "Task" [ 669.183026] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.193930] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617864, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.313875] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.353061] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.186s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.353775] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 669.361023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.831s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.361023] env[70020]: DEBUG nova.objects.instance [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 669.606775] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617862, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.635928] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617863, 'name': Rename_Task, 'duration_secs': 0.329907} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.636265] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.636531] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1ca4116-cca9-418d-9e3d-87061f70e1a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.643289] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 669.643289] env[70020]: value = "task-3617865" [ 669.643289] env[70020]: _type = "Task" [ 669.643289] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.651979] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.694358] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617864, 'name': Destroy_Task, 'duration_secs': 0.479832} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.694613] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Destroyed the VM [ 669.694847] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 669.695459] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6c5e15d2-4396-4e87-9a9a-159a5ba9a025 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.701832] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 669.701832] env[70020]: value = "task-3617866" [ 669.701832] env[70020]: _type = "Task" [ 669.701832] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.709970] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617866, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.816288] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617858, 'name': PowerOffVM_Task, 'duration_secs': 0.662675} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.816946] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.817203] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.818344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e6b9e33-b57c-49ec-8bff-d7a0714dd65e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.865024] env[70020]: DEBUG nova.compute.utils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 669.868271] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 669.871216] env[70020]: DEBUG nova.network.neutron [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 669.880919] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 669.880919] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 669.880919] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Deleting the datastore file [datastore2] 79d98176-b566-4349-ad10-c2ea6fdbc657 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.880919] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40614534-b418-452b-a920-d839713bed51 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.893669] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for the task: (returnval){ [ 669.893669] env[70020]: value = "task-3617868" [ 669.893669] env[70020]: _type = "Task" [ 669.893669] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.904152] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617868, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.944035] env[70020]: DEBUG nova.policy [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41059e43a7644fa4876da5770e24f735', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'add37b0346e74e7f9724e69253e2cffc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 670.112320] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617862, 'name': CreateVM_Task, 'duration_secs': 0.525354} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.112720] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 670.114289] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.114524] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.114859] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 670.115153] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b057f1c-9082-4619-9025-d5d6c1a0287d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.120357] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 670.120357] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524f0a03-e336-2fee-25bd-0f470cac12c3" [ 670.120357] env[70020]: _type = "Task" [ 670.120357] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.131322] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524f0a03-e336-2fee-25bd-0f470cac12c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.169277] env[70020]: DEBUG oslo_vmware.api [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617865, 'name': PowerOnVM_Task, 'duration_secs': 0.501332} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.170275] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.170275] env[70020]: INFO nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Took 9.99 seconds to spawn the instance on the hypervisor. [ 670.170275] env[70020]: DEBUG nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.171593] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c40852-d83f-4a53-86a8-3b36c6da5a9c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.213596] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617866, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.297265] env[70020]: DEBUG nova.network.neutron [-] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.339126] env[70020]: DEBUG nova.network.neutron [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Successfully created port: b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 670.368911] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 670.374662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ca8e6b0-f5fb-4876-8e24-36388c6ba628 tempest-ServersAdmin275Test-1125750199 tempest-ServersAdmin275Test-1125750199-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.376315] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.392s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.378776] env[70020]: INFO nova.compute.claims [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.411988] env[70020]: DEBUG oslo_vmware.api [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Task: {'id': task-3617868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357465} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.411988] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.411988] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 670.412318] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.412318] env[70020]: INFO nova.compute.manager [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Took 1.64 seconds to destroy the instance on the hypervisor. [ 670.412563] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.412747] env[70020]: DEBUG nova.compute.manager [-] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 670.412838] env[70020]: DEBUG nova.network.neutron [-] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.634466] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524f0a03-e336-2fee-25bd-0f470cac12c3, 'name': SearchDatastore_Task, 'duration_secs': 0.022185} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.634466] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.634466] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.634466] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.635235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.635351] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 670.636498] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c830a09-6309-4b88-9c9c-5929e8071503 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.651549] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 
tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 670.651549] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.652356] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c338e1ef-7ac0-4115-b67c-49627eee81f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.659802] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 670.659802] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52969cf1-250d-0e1c-2868-efe17c72a05b" [ 670.659802] env[70020]: _type = "Task" [ 670.659802] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.678410] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52969cf1-250d-0e1c-2868-efe17c72a05b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.699900] env[70020]: INFO nova.compute.manager [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Took 31.46 seconds to build instance. [ 670.718013] env[70020]: DEBUG oslo_vmware.api [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617866, 'name': RemoveSnapshot_Task, 'duration_secs': 0.982816} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.718341] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 670.718665] env[70020]: INFO nova.compute.manager [None req-cdf010de-9d75-4dc4-a180-a131944b5f97 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Took 16.92 seconds to snapshot the instance on the hypervisor. [ 670.801032] env[70020]: INFO nova.compute.manager [-] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Took 1.81 seconds to deallocate network for instance. 
[ 670.822920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquiring lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.822920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.176150] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52969cf1-250d-0e1c-2868-efe17c72a05b, 'name': SearchDatastore_Task, 'duration_secs': 0.018622} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.176624] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-618a32b9-2ac5-432f-8d3c-76d822298dda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.187350] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 671.187350] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dd21fb-5e1b-f125-8e4a-74fd367909fe" [ 671.187350] env[70020]: _type = "Task" [ 671.187350] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.197106] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dd21fb-5e1b-f125-8e4a-74fd367909fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.204223] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ef8e448-f2f6-406c-947b-16b7d5780607 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.972s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.235513] env[70020]: DEBUG nova.compute.manager [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Received event network-vif-plugged-bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.236147] env[70020]: DEBUG oslo_concurrency.lockutils [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] Acquiring lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.236147] env[70020]: DEBUG oslo_concurrency.lockutils [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.238091] env[70020]: DEBUG oslo_concurrency.lockutils [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.239101] env[70020]: DEBUG nova.compute.manager [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] No waiting events found dispatching network-vif-plugged-bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 671.239101] env[70020]: WARNING nova.compute.manager [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Received unexpected event network-vif-plugged-bfb6aa7f-bef1-4a61-8430-16719d55f556 for instance with vm_state building and task_state spawning. [ 671.239101] env[70020]: DEBUG nova.compute.manager [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Received event network-changed-bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.239101] env[70020]: DEBUG nova.compute.manager [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Refreshing instance network info cache due to event network-changed-bfb6aa7f-bef1-4a61-8430-16719d55f556. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.239309] env[70020]: DEBUG oslo_concurrency.lockutils [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] Acquiring lock "refresh_cache-832a38c8-ed3a-460b-91bd-0138d2f2d03d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.239309] env[70020]: DEBUG oslo_concurrency.lockutils [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] Acquired lock "refresh_cache-832a38c8-ed3a-460b-91bd-0138d2f2d03d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.239614] env[70020]: DEBUG nova.network.neutron [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Refreshing network info cache for port bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.313142] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.346508] env[70020]: DEBUG nova.compute.manager [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Received event network-changed-c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.346705] env[70020]: DEBUG nova.compute.manager [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Refreshing instance network info cache due to event network-changed-c5dfdf74-4ed7-460a-b458-cb45cbc910c2. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.346917] env[70020]: DEBUG oslo_concurrency.lockutils [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] Acquiring lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.347822] env[70020]: DEBUG oslo_concurrency.lockutils [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] Acquired lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.348355] env[70020]: DEBUG nova.network.neutron [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Refreshing network info cache for port c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.383891] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 671.420797] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 671.421106] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.421262] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 671.421479] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 671.421660] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 671.421810] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 671.422041] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 671.422210] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 671.422377] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 671.422539] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 671.422799] env[70020]: DEBUG nova.virt.hardware [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 671.424193] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d33e48-e952-466c-81ba-9d7f72cb579d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.436795] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6178b149-0a79-4d6a-a943-ef2fd6ba5a1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.574656] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 671.574841] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None 
None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 671.703554] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dd21fb-5e1b-f125-8e4a-74fd367909fe, 'name': SearchDatastore_Task, 'duration_secs': 0.038479} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.705977] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.705977] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 832a38c8-ed3a-460b-91bd-0138d2f2d03d/832a38c8-ed3a-460b-91bd-0138d2f2d03d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.712179] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cdfd4e9-d7b4-4af1-9666-29fa462a4763 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.715295] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.717811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "61bea079-9731-48d1-b472-b30226a0b5a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.718075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "61bea079-9731-48d1-b472-b30226a0b5a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.724792] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 671.724792] env[70020]: value = "task-3617870" [ 671.724792] env[70020]: _type = "Task" [ 671.724792] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.736858] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617870, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.994059] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42e2014-f5d0-4466-8c8b-7e400d86f71b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.005019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec3526c-51c8-4928-93f7-b31f6e257a33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.047654] env[70020]: DEBUG nova.network.neutron [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Updated VIF entry in instance network info cache for port bfb6aa7f-bef1-4a61-8430-16719d55f556. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.047654] env[70020]: DEBUG nova.network.neutron [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Updating instance_info_cache with network_info: [{"id": "bfb6aa7f-bef1-4a61-8430-16719d55f556", "address": "fa:16:3e:f3:07:11", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfb6aa7f-be", "ovs_interfaceid": "bfb6aa7f-bef1-4a61-8430-16719d55f556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.051860] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3780fd02-369c-4d95-be93-2e07053dde4d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.062714] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97e20b2-65b1-43e5-9a61-5facf6c9adfc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.092342] env[70020]: DEBUG nova.compute.provider_tree [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.094352] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.094783] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.094981] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.095168] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 
None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.095387] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.095568] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.095710] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 672.095919] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 672.202200] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.202531] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.221617] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.221849] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.239561] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617870, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476412} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.240414] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 832a38c8-ed3a-460b-91bd-0138d2f2d03d/832a38c8-ed3a-460b-91bd-0138d2f2d03d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.240713] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.241186] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5887306-a0a9-4c25-a6cc-ba5f88be58e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.248784] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 672.248784] env[70020]: value = "task-3617871" [ 672.248784] env[70020]: _type = "Task" [ 672.248784] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.253701] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.259296] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.462561] env[70020]: DEBUG nova.network.neutron [-] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.467061] env[70020]: DEBUG nova.network.neutron [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updated VIF entry in instance network info cache for port c5dfdf74-4ed7-460a-b458-cb45cbc910c2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.467461] env[70020]: DEBUG nova.network.neutron [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updating instance_info_cache with network_info: [{"id": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "address": "fa:16:3e:40:87:06", "network": {"id": "0676912c-462c-45af-8413-43c139247139", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-574076578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "852583dc12774b19bffbb2b0791e8336", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfdf74-4e", "ovs_interfaceid": "c5dfdf74-4ed7-460a-b458-cb45cbc910c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.512795] env[70020]: DEBUG nova.network.neutron [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Successfully updated port: b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.553603] env[70020]: DEBUG oslo_concurrency.lockutils [req-32c80205-fe80-472e-836a-8651b32baca4 req-e4129753-ac63-40bf-80d9-e09b98476378 service nova] Releasing lock "refresh_cache-832a38c8-ed3a-460b-91bd-0138d2f2d03d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.597232] env[70020]: DEBUG nova.scheduler.client.report [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.602540] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.730698] env[70020]: INFO 
nova.compute.manager [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Detaching volume e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd [ 672.765019] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210448} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.765019] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.765019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe22ea7-65bd-4351-b38c-b525df13f8cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.793392] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 832a38c8-ed3a-460b-91bd-0138d2f2d03d/832a38c8-ed3a-460b-91bd-0138d2f2d03d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.794887] env[70020]: INFO nova.virt.block_device [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Attempting to driver detach volume e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd from mountpoint /dev/sdb [ 672.795176] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 672.795407] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721591', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'name': 'volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1f95bfa8-bc97-4ed7-8c33-c00297430bf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'serial': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 672.795745] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef1b3961-5b3e-495d-90f5-a3a4e9ad4500 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.815652] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7540ffc-3f38-4c14-923d-6f9b595612ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.841219] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8231c82-7364-480b-96d7-058e38460ca3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.844858] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 672.844858] env[70020]: value = "task-3617872" [ 672.844858] env[70020]: _type = "Task" [ 672.844858] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.851988] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5e3d75-7a36-46b2-a107-cc2d4ce7a36e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.859779] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617872, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.879480] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61bc9e4-a249-4030-a478-0874df788b7e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.899038] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] The volume has not been displaced from its original location: [datastore1] volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd/volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 672.904185] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Reconfiguring VM instance instance-00000006 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 672.904607] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baf91168-72cf-4e60-8b54-8b948aad18de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.921842] env[70020]: DEBUG oslo_vmware.api [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 672.921842] env[70020]: value = "task-3617873" [ 672.921842] env[70020]: _type = "Task" [ 672.921842] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.930793] env[70020]: DEBUG oslo_vmware.api [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617873, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.963801] env[70020]: INFO nova.compute.manager [-] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Took 2.55 seconds to deallocate network for instance. 
[ 672.970470] env[70020]: DEBUG oslo_concurrency.lockutils [req-42c9d5d4-741a-4bbd-a8cd-60ace081ffc8 req-27b0bd26-1a6e-4c20-bebe-162f27df8b64 service nova] Releasing lock "refresh_cache-3501a6fc-f090-4098-8f63-57a97bd61f1b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.022102] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.022323] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.022768] env[70020]: DEBUG nova.network.neutron [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.104811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.105335] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 673.108952] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.746s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.109208] env[70020]: DEBUG nova.objects.instance [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lazy-loading 'resources' on Instance uuid 81d5a1b4-1398-4fca-b500-aa2a3dc41494 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 673.360820] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617872, 'name': ReconfigVM_Task, 'duration_secs': 0.299736} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.361169] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 832a38c8-ed3a-460b-91bd-0138d2f2d03d/832a38c8-ed3a-460b-91bd-0138d2f2d03d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.365971] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90347907-328d-4d0e-abfe-2d0751091668 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.372665] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 673.372665] env[70020]: value = "task-3617875" [ 673.372665] env[70020]: _type = "Task" [ 673.372665] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.386936] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617875, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.434040] env[70020]: DEBUG oslo_vmware.api [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617873, 'name': ReconfigVM_Task, 'duration_secs': 0.409337} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.434425] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Reconfigured VM instance instance-00000006 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 673.440640] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f17cb52-7cbc-4e16-9b54-39b3e758f43c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.460357] env[70020]: DEBUG oslo_vmware.api [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 673.460357] env[70020]: value = "task-3617876" [ 673.460357] env[70020]: _type = "Task" [ 673.460357] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.467713] env[70020]: DEBUG oslo_vmware.api [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617876, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.471949] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.554811] env[70020]: DEBUG nova.network.neutron [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.616798] env[70020]: DEBUG nova.compute.utils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 673.618443] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 673.618652] env[70020]: DEBUG nova.network.neutron [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.723913] env[70020]: DEBUG nova.policy [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eef18a9e7e94e9a8304272a7195e9fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45b60595b57a44f1b620892bac738904', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.787124] env[70020]: DEBUG nova.network.neutron [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [{"id": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "address": "fa:16:3e:33:a0:4d", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ae7c4e-fc", "ovs_interfaceid": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.885441] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617875, 'name': Rename_Task, 'duration_secs': 0.134898} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.885790] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.886617] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cafe3c7a-fc78-4a0c-bb98-08c8929f54ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.895289] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 673.895289] env[70020]: value = "task-3617877" [ 673.895289] env[70020]: _type = "Task" [ 673.895289] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.903443] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617877, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.936787] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "16c45b86-317a-4d0c-a402-51c85af37a5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.937027] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.972586] env[70020]: DEBUG oslo_vmware.api [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3617876, 'name': ReconfigVM_Task, 'duration_secs': 0.200816} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.972586] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721591', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'name': 'volume-e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1f95bfa8-bc97-4ed7-8c33-c00297430bf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd', 'serial': 'e54a3dde-2eb6-4c6d-aea0-e0c46f4906bd'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 673.994572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.994572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.994572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.994572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.994723] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.995606] env[70020]: INFO nova.compute.manager [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] 
[instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Terminating instance [ 674.003611] env[70020]: DEBUG nova.compute.manager [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Received event network-vif-deleted-c7c80204-b8b4-46c6-8d93-38d4879119d3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.005027] env[70020]: DEBUG nova.compute.manager [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Received event network-vif-plugged-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.005027] env[70020]: DEBUG oslo_concurrency.lockutils [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] Acquiring lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.005027] env[70020]: DEBUG oslo_concurrency.lockutils [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.005027] env[70020]: DEBUG oslo_concurrency.lockutils [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.005027] env[70020]: DEBUG nova.compute.manager [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] No waiting events found dispatching network-vif-plugged-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.005193] env[70020]: WARNING nova.compute.manager [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Received unexpected event network-vif-plugged-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 for instance with vm_state building and task_state spawning. [ 674.005193] env[70020]: DEBUG nova.compute.manager [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Received event network-changed-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.006376] env[70020]: DEBUG nova.compute.manager [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Refreshing instance network info cache due to event network-changed-b2ae7c4e-fcb4-4d62-9ff1-82de773af513. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 674.006661] env[70020]: DEBUG oslo_concurrency.lockutils [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] Acquiring lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.073928] env[70020]: DEBUG nova.compute.manager [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Received event network-vif-deleted-16a8d745-ea55-4e94-9513-0b5547738678 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.074213] env[70020]: DEBUG nova.compute.manager [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Received event network-changed-31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.074369] env[70020]: DEBUG nova.compute.manager [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Refreshing instance network info cache due to event network-changed-31ef45d2-b59a-4c2c-9fdc-f17ae158e442. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 674.074600] env[70020]: DEBUG oslo_concurrency.lockutils [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] Acquiring lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.074739] env[70020]: DEBUG oslo_concurrency.lockutils [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] Acquired lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.074910] env[70020]: DEBUG nova.network.neutron [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Refreshing network info cache for port 31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.126412] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 674.173742] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3824083-8159-4938-9a7a-4232300d0193 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.186910] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b42760d-b89a-442b-b0d0-4c7e385371a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.912092] env[70020]: DEBUG nova.network.neutron [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Successfully created port: 5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.914710] env[70020]: DEBUG nova.objects.instance [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lazy-loading 'flavor' on Instance uuid 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 674.916528] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.916789] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance network_info: |[{"id": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "address": "fa:16:3e:33:a0:4d", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ae7c4e-fc", "ovs_interfaceid": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 674.917547] env[70020]: DEBUG nova.compute.manager [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] 
[instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.917653] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.926037] env[70020]: DEBUG oslo_concurrency.lockutils [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] Acquired lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.926037] env[70020]: DEBUG nova.network.neutron [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Refreshing network info cache for port b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.926037] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:a0:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2ae7c4e-fcb4-4d62-9ff1-82de773af513', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.934912] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.938798] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e994116-41a1-4da8-81a8-275f780a3f60 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.942843] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.944042] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebe52910-cae3-457f-a58c-1e7889ed8844 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.009748] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f8ce03-8279-4998-b42d-8c1fa60f19de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.015140] env[70020]: DEBUG oslo_vmware.api [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617877, 'name': PowerOnVM_Task, 'duration_secs': 0.541125} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.015140] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 675.017215] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.017355] env[70020]: INFO nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Took 8.79 seconds to spawn the instance on the hypervisor. [ 675.017601] env[70020]: DEBUG nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.018217] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b8c39f1-0496-41c3-9b88-41ae82f98349 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.020276] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 675.020276] env[70020]: value = "task-3617878" [ 675.020276] env[70020]: _type = "Task" [ 675.020276] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.021756] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c40653-23a2-419e-9401-e2da3396be3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.032302] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa73535b-86d1-44e1-a951-1d8d151e07e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.039256] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 675.039256] env[70020]: value = "task-3617879" [ 675.039256] env[70020]: _type = "Task" [ 675.039256] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.047401] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617878, 'name': CreateVM_Task} progress is 15%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.062088] env[70020]: DEBUG nova.compute.provider_tree [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.069554] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.272573] env[70020]: DEBUG nova.network.neutron [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updated VIF entry in instance network info cache for port 31ef45d2-b59a-4c2c-9fdc-f17ae158e442. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.272949] env[70020]: DEBUG nova.network.neutron [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updating instance_info_cache with network_info: [{"id": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "address": "fa:16:3e:33:32:4f", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31ef45d2-b5", "ovs_interfaceid": "31ef45d2-b59a-4c2c-9fdc-f17ae158e442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.446886] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 675.479473] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 675.479473] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.479637] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 675.479832] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.480178] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 675.480447] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 675.480770] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 675.481205] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 675.481205] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 675.481386] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 675.481448] env[70020]: DEBUG nova.virt.hardware [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 675.483008] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01446ef7-914d-4296-bdb5-8b4749047c70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.491864] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222db224-b258-410d-8120-1f2004f9b88e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.542024] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617878, 'name': CreateVM_Task, 'duration_secs': 0.386458} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.542024] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.542024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.542024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.542024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.542327] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-898111d3-727f-44bb-9dba-1f7d3c85889d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.547393] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 675.547393] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52527601-4dbd-ee12-00e5-a347d4993188" [ 675.547393] env[70020]: _type = "Task" [ 675.547393] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.561484] env[70020]: INFO nova.compute.manager [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Took 29.00 seconds to build instance. 
[ 675.569040] env[70020]: DEBUG nova.scheduler.client.report [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 675.572377] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617879, 'name': PowerOffVM_Task, 'duration_secs': 0.197798} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.578657] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 675.579327] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 675.579835] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52527601-4dbd-ee12-00e5-a347d4993188, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.580649] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4655ba89-1cf0-4b04-ac21-966d4c3dfe55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.729607] env[70020]: DEBUG nova.network.neutron [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updated VIF entry in instance network info cache for port b2ae7c4e-fcb4-4d62-9ff1-82de773af513. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.729999] env[70020]: DEBUG nova.network.neutron [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [{"id": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "address": "fa:16:3e:33:a0:4d", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ae7c4e-fc", "ovs_interfaceid": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.775995] env[70020]: DEBUG oslo_concurrency.lockutils [req-954f80ae-08ac-466e-98cb-36202268e151 req-fc4d042a-0d4e-4849-b659-9c655a6f56ea service nova] Releasing lock "refresh_cache-301b30f6-9909-4fc9-8721-88a314e4edb4" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.931753] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9a29a062-3002-4c4d-9603-beceef397242 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.709s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.062111] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52527601-4dbd-ee12-00e5-a347d4993188, 'name': SearchDatastore_Task, 'duration_secs': 0.022939} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.062504] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.062998] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.063474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.063715] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.063980] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.064463] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0104bce-7cc7-4881-ad42-0dd583bf88cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.074873] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.074873] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.075235] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-682a380c-f402-4efb-abd6-a13036739086 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.079182] env[70020]: DEBUG oslo_concurrency.lockutils [None req-afeae229-8db9-441b-8b9c-adb38b7711c2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.547s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.079769] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.971s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.089391] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.525s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.091555] env[70020]: INFO nova.compute.claims [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.099133] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 676.099133] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52006733-9256-2f7a-7b73-3d4bcda70a6f" [ 676.099133] env[70020]: _type = "Task" [ 676.099133] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.110199] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52006733-9256-2f7a-7b73-3d4bcda70a6f, 'name': SearchDatastore_Task, 'duration_secs': 0.019497} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.110654] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11b76455-36dd-43a5-92aa-3f6ea4ca7962 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.117224] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 676.117224] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d24cd7-81f8-3176-edd2-ec3e597c9475" [ 676.117224] env[70020]: _type = "Task" [ 676.117224] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.123831] env[70020]: INFO nova.scheduler.client.report [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted allocations for instance 81d5a1b4-1398-4fca-b500-aa2a3dc41494 [ 676.130246] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d24cd7-81f8-3176-edd2-ec3e597c9475, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.232723] env[70020]: DEBUG oslo_concurrency.lockutils [req-8d2a3a4c-9954-4e6f-89cb-ce57f66dbd20 req-bab97dbd-7279-44df-9bc3-2eb69cb32e5a service nova] Releasing lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.604301] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 676.633288] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d24cd7-81f8-3176-edd2-ec3e597c9475, 'name': SearchDatastore_Task, 'duration_secs': 0.017797} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.633288] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.633288] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb/50ce7a0c-aa80-4816-b84e-d8ff7b10fffb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.633288] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b744959b-6188-4456-9f16-86c903c94dba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.639353] env[70020]: DEBUG oslo_concurrency.lockutils [None req-121efe45-7a44-4df3-9ad7-2a711718e9b4 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "81d5a1b4-1398-4fca-b500-aa2a3dc41494" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.951s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.645399] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 676.645399] env[70020]: value = "task-3617882" [ 676.645399] env[70020]: _type = "Task" [ 676.645399] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.655749] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617882, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.979449] env[70020]: DEBUG nova.network.neutron [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Successfully updated port: 5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 677.143068] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.166511] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617882, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.483813] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "refresh_cache-7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.483813] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired lock "refresh_cache-7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.483813] env[70020]: DEBUG nova.network.neutron [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.624658] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7df0088-9117-44fa-871d-221c4fd5b130 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.632637] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e23a4bb-73a4-46ee-9141-0d67355b8d35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.668305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d834c754-e5a4-49e7-81f4-9ad3b30ff207 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.675632] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617882, 'name': CopyVirtualDisk_Task, 'duration_secs': 
0.602574} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.677723] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb/50ce7a0c-aa80-4816-b84e-d8ff7b10fffb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.677947] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.678244] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0e2a03e-632e-4c4d-b1a5-084efc8faa04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.681162] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a684b598-5455-4952-a8b5-e4ca0df243ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.696712] env[70020]: DEBUG nova.compute.provider_tree [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.699134] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 677.699134] env[70020]: value = "task-3617883" [ 677.699134] env[70020]: _type = "Task" [ 677.699134] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.713754] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617883, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.855415] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 677.855415] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 677.855415] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Deleting the datastore file [datastore1] 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 677.855415] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2009589e-b246-42ae-907d-3f67e88ea9ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.861923] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for the task: (returnval){ [ 677.861923] env[70020]: value = "task-3617884" [ 677.861923] env[70020]: _type = "Task" [ 677.861923] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.870054] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.877033] env[70020]: DEBUG nova.compute.manager [req-075caa5a-7786-4433-bca8-bc6550e13acb req-4198dbcb-f576-4c24-9bb7-db1fa5f3f310 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Received event network-vif-plugged-5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 677.877254] env[70020]: DEBUG oslo_concurrency.lockutils [req-075caa5a-7786-4433-bca8-bc6550e13acb req-4198dbcb-f576-4c24-9bb7-db1fa5f3f310 service nova] Acquiring lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.877475] env[70020]: DEBUG oslo_concurrency.lockutils [req-075caa5a-7786-4433-bca8-bc6550e13acb req-4198dbcb-f576-4c24-9bb7-db1fa5f3f310 service nova] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.877634] env[70020]: DEBUG oslo_concurrency.lockutils [req-075caa5a-7786-4433-bca8-bc6550e13acb req-4198dbcb-f576-4c24-9bb7-db1fa5f3f310 service nova] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.877795] env[70020]: DEBUG nova.compute.manager [req-075caa5a-7786-4433-bca8-bc6550e13acb req-4198dbcb-f576-4c24-9bb7-db1fa5f3f310 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] No waiting events found dispatching network-vif-plugged-5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 677.877952] env[70020]: WARNING nova.compute.manager [req-075caa5a-7786-4433-bca8-bc6550e13acb req-4198dbcb-f576-4c24-9bb7-db1fa5f3f310 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Received unexpected event network-vif-plugged-5107e297-9610-48b2-bcdf-2ec121611559 for instance with vm_state building and task_state spawning. [ 678.201482] env[70020]: DEBUG nova.scheduler.client.report [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.213436] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617883, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.392813} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.213714] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.214488] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d9ae65-8573-4b04-bff8-f72e635acbd9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.238428] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb/50ce7a0c-aa80-4816-b84e-d8ff7b10fffb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.239023] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a562883-0458-4535-b812-cd5f9f2e92a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.261925] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 678.261925] env[70020]: value = "task-3617885" [ 678.261925] env[70020]: _type = "Task" [ 678.261925] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.272278] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617885, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.278792] env[70020]: DEBUG nova.network.neutron [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.372721] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.513816] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.514389] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.616133] env[70020]: DEBUG nova.network.neutron [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Updating instance_info_cache with network_info: [{"id": "5107e297-9610-48b2-bcdf-2ec121611559", "address": "fa:16:3e:e5:63:29", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5107e297-96", "ovs_interfaceid": "5107e297-9610-48b2-bcdf-2ec121611559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.709713] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.710344] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 678.713640] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.539s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.715905] env[70020]: INFO nova.compute.claims [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.778269] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617885, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.873990] env[70020]: DEBUG oslo_vmware.api [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Task: {'id': task-3617884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.77309} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.874421] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 678.874698] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 678.874916] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 678.875172] env[70020]: INFO nova.compute.manager [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Took 3.96 seconds to destroy the instance on the hypervisor. [ 678.875459] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.875691] env[70020]: DEBUG nova.compute.manager [-] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 678.875830] env[70020]: DEBUG nova.network.neutron [-] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.119399] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Releasing lock "refresh_cache-7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.119784] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Instance network_info: |[{"id": "5107e297-9610-48b2-bcdf-2ec121611559", "address": "fa:16:3e:e5:63:29", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5107e297-96", "ovs_interfaceid": "5107e297-9610-48b2-bcdf-2ec121611559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 679.120244] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:63:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5107e297-9610-48b2-bcdf-2ec121611559', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.127823] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.128102] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 679.128352] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afd36b4e-ecd8-47d9-8fb7-e8971059c930 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.148288] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.148288] env[70020]: value = "task-3617886" [ 679.148288] env[70020]: _type = "Task" [ 679.148288] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.157444] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617886, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.225426] env[70020]: DEBUG nova.compute.utils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 679.228423] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 679.228681] env[70020]: DEBUG nova.network.neutron [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 679.273841] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617885, 'name': ReconfigVM_Task, 'duration_secs': 0.552007} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.274248] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb/50ce7a0c-aa80-4816-b84e-d8ff7b10fffb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 679.276251] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27ab93d2-9876-412f-9a5c-b01b48440e5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.279564] env[70020]: DEBUG nova.policy [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b5901345bb14e1d800e299f59baeb44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ba9aef977894366a098bf5aa627a1ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 679.282564] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 679.282564] env[70020]: value = "task-3617887" [ 679.282564] env[70020]: _type = "Task" [ 679.282564] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.291261] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617887, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.531857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "738d52c6-0368-434f-a14f-05b47ca865e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.532118] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "738d52c6-0368-434f-a14f-05b47ca865e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.554015] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.556031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.663242] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617886, 'name': CreateVM_Task, 'duration_secs': 0.356068} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.663502] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.664566] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.666119] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.666480] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.667487] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51af692e-5aff-4765-85eb-05993789252f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.674676] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 679.674676] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52018368-1a4e-015d-9879-722d0632fa54" [ 679.674676] env[70020]: _type = "Task" [ 679.674676] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.684057] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52018368-1a4e-015d-9879-722d0632fa54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.708438] env[70020]: DEBUG nova.network.neutron [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Successfully created port: 0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.736219] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 679.747272] env[70020]: DEBUG nova.network.neutron [-] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.796342] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617887, 'name': Rename_Task, 'duration_secs': 0.162091} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.799661] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.799908] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2cacb60e-97fb-4e30-a9ce-5debd1edef3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.807353] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 679.807353] env[70020]: value = "task-3617888" [ 679.807353] env[70020]: _type = "Task" [ 679.807353] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.818827] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617888, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.008174] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.008174] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.190037] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52018368-1a4e-015d-9879-722d0632fa54, 'name': SearchDatastore_Task, 'duration_secs': 0.011831} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.190550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.190550] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.190961] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.191221] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.191487] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.191831] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1739a96e-1507-4714-b398-3db6c1d2e37f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.206360] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 680.206360] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 680.209535] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a81b9cc5-c36b-4c82-b88b-b3c5d98e7f4d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.216048] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 680.216048] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524ab383-0c1a-2cde-aa86-24cdc0ce8477" [ 680.216048] env[70020]: _type = "Task" [ 680.216048] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.226901] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524ab383-0c1a-2cde-aa86-24cdc0ce8477, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.250332] env[70020]: INFO nova.compute.manager [-] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Took 1.37 seconds to deallocate network for instance. [ 680.282835] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6b587b-814b-48fa-80b6-8200caed080c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.295329] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c5df5d-0681-48da-b957-89539fcf160e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.330549] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21326117-ff26-4aed-9024-4cb12d6a735d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.341314] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617888, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.342524] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b148c32e-f95e-43f4-a170-29e3b87dbade {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.356190] env[70020]: DEBUG nova.compute.provider_tree [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.612792] env[70020]: DEBUG nova.compute.manager [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Received event network-changed-5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 680.613416] env[70020]: DEBUG nova.compute.manager [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Refreshing instance network info cache due to event network-changed-5107e297-9610-48b2-bcdf-2ec121611559. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 680.613416] env[70020]: DEBUG oslo_concurrency.lockutils [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] Acquiring lock "refresh_cache-7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.613946] env[70020]: DEBUG oslo_concurrency.lockutils [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] Acquired lock "refresh_cache-7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.614158] env[70020]: DEBUG nova.network.neutron [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Refreshing network info cache for port 5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.743023] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524ab383-0c1a-2cde-aa86-24cdc0ce8477, 'name': SearchDatastore_Task, 'duration_secs': 0.024169} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.743023] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96e6eab3-af31-41ba-882b-b9333350939c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.746715] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 680.758292] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.758903] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 680.758903] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527885d6-b23e-538f-becf-ef08f516f573" [ 680.758903] env[70020]: _type = "Task" [ 680.758903] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.767884] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527885d6-b23e-538f-becf-ef08f516f573, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.777951] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.778295] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.778487] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.778731] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.778956] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.779290] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.779597] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 680.779798] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.780705] env[70020]: DEBUG nova.virt.hardware [None 
req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.780705] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.780705] env[70020]: DEBUG nova.virt.hardware [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.781405] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db25435-2445-428e-b9a5-80de6acc9382 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.788633] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec112132-e1be-4bbd-b5f2-8c8ca9bd4e91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.836460] env[70020]: DEBUG oslo_vmware.api [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617888, 'name': PowerOnVM_Task, 'duration_secs': 0.93305} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.837057] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 680.837332] env[70020]: INFO nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 680.837573] env[70020]: DEBUG nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 680.838603] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9a6a5f-1ff7-4387-b56e-ae5773497609 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.859989] env[70020]: DEBUG nova.scheduler.client.report [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.900243] env[70020]: DEBUG nova.compute.manager [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 680.900243] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b65c38-4723-4cab-b864-341ac601ed0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.271171] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527885d6-b23e-538f-becf-ef08f516f573, 'name': SearchDatastore_Task, 'duration_secs': 0.018357} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.271947] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.272485] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764/7cf7f0a9-8240-4e78-b5d4-b1eb1da60764.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.274017] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-591d8d34-c1f0-4de1-a18a-098044b92906 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.282137] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 681.282137] env[70020]: value = "task-3617889" [ 681.282137] env[70020]: _type = "Task" [ 681.282137] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.292261] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617889, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.356829] env[70020]: INFO nova.compute.manager [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Took 34.63 seconds to build instance. [ 681.366475] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.366475] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 681.370034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.385s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.370115] env[70020]: DEBUG nova.objects.instance [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lazy-loading 'resources' on Instance uuid b0b825d4-534d-4d54-a0c4-b9e507726c47 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 681.412087] env[70020]: INFO nova.compute.manager [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] instance snapshotting [ 681.414701] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7208d82-e3dc-4718-9295-8b21d3281716 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.440081] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85743b21-2c6f-45bd-b18d-e962a6b229a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.501151] env[70020]: DEBUG nova.network.neutron [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Successfully updated port: 0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.574967] env[70020]: DEBUG nova.compute.manager [req-7d8c8475-ff72-4c46-bb1d-e092105b833c req-9e593307-831d-4924-8a97-78ab6069f0cb service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Received event network-vif-plugged-0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.575192] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d8c8475-ff72-4c46-bb1d-e092105b833c req-9e593307-831d-4924-8a97-78ab6069f0cb service nova] Acquiring lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.575449] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d8c8475-ff72-4c46-bb1d-e092105b833c req-9e593307-831d-4924-8a97-78ab6069f0cb service nova] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.575632] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d8c8475-ff72-4c46-bb1d-e092105b833c req-9e593307-831d-4924-8a97-78ab6069f0cb service nova] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.575769] env[70020]: DEBUG nova.compute.manager [req-7d8c8475-ff72-4c46-bb1d-e092105b833c req-9e593307-831d-4924-8a97-78ab6069f0cb service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] No waiting events found dispatching network-vif-plugged-0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.575933] env[70020]: WARNING nova.compute.manager [req-7d8c8475-ff72-4c46-bb1d-e092105b833c req-9e593307-831d-4924-8a97-78ab6069f0cb service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Received unexpected event network-vif-plugged-0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 for instance with vm_state building and task_state spawning. [ 681.603414] env[70020]: DEBUG nova.network.neutron [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Updated VIF entry in instance network info cache for port 5107e297-9610-48b2-bcdf-2ec121611559. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 681.603836] env[70020]: DEBUG nova.network.neutron [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Updating instance_info_cache with network_info: [{"id": "5107e297-9610-48b2-bcdf-2ec121611559", "address": "fa:16:3e:e5:63:29", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5107e297-96", "ovs_interfaceid": "5107e297-9610-48b2-bcdf-2ec121611559", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.794022] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617889, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.860729] env[70020]: DEBUG oslo_concurrency.lockutils [None req-39e3d598-6e58-4eb3-adc6-2748402a8d15 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.158s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.873527] env[70020]: DEBUG nova.compute.utils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 681.876301] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.878322] env[70020]: DEBUG nova.network.neutron [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.958677] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 681.959710] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a8b35617-fc1b-4977-93b4-77476769a7e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.967263] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 681.967263] env[70020]: value = "task-3617890" [ 681.967263] env[70020]: _type = "Task" [ 681.967263] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.975448] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617890, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.990268] env[70020]: DEBUG nova.policy [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2d2bb1595854528850866f2621adcb4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37492ec68df9439e860aaacce6f58bcf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 682.004608] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "refresh_cache-c4335d00-29a3-4f2e-b826-1a78ef02e0bf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.005380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquired lock "refresh_cache-c4335d00-29a3-4f2e-b826-1a78ef02e0bf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.007011] env[70020]: DEBUG nova.network.neutron [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.105935] env[70020]: DEBUG oslo_concurrency.lockutils [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] Releasing lock "refresh_cache-7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.106709] env[70020]: DEBUG nova.compute.manager [req-254abb53-b0ba-495f-827f-0bbc609e4c6b req-a205129c-23af-4369-87d1-aa2506b22435 service nova] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Received event network-vif-deleted-09f3d7f9-1529-498f-b393-01af888741b2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 682.293678] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698266} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.296407] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764/7cf7f0a9-8240-4e78-b5d4-b1eb1da60764.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 682.296646] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 682.298114] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4e8b4ee-3c9c-4656-aea1-98f9b1605ae6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.307995] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 682.307995] env[70020]: value = "task-3617891" [ 682.307995] env[70020]: _type = "Task" [ 682.307995] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.320168] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617891, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.364834] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.385069] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 682.473959] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef57044-5332-42fa-a148-2c4729a9a942 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.485535] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617890, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.486663] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d0be4e-33aa-4c45-8ff3-ce62f275f758 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.522115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43b9a7a-16c3-4522-a828-2a8e8d7b8216 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.530860] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9d8258-1506-4d86-af6d-97533dbad6e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.545415] env[70020]: DEBUG nova.compute.provider_tree [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.573211] env[70020]: DEBUG nova.network.neutron [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.767675] env[70020]: DEBUG nova.network.neutron [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Updating instance_info_cache with network_info: [{"id": "0e881a0f-2c6d-43ba-ad4e-bc9ce172d429", "address": "fa:16:3e:33:10:c8", "network": {"id": "78d73c6d-9612-41c9-8f3d-c15281c3ae04", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1821259165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba9aef977894366a098bf5aa627a1ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b0fa7a2-ebd9-4788-8904-7bf250ce466c", "external-id": "nsx-vlan-transportzone-669", "segmentation_id": 669, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e881a0f-2c", "ovs_interfaceid": "0e881a0f-2c6d-43ba-ad4e-bc9ce172d429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.821176] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06144} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.821496] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 682.822432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44548926-b6d9-40ec-81ea-22bcc5f088f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.847188] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764/7cf7f0a9-8240-4e78-b5d4-b1eb1da60764.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.847862] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05e4bd2d-2e6b-42ae-b4de-dc6f6d529dda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.876856] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 682.876856] env[70020]: value = "task-3617892" [ 682.876856] env[70020]: _type = "Task" [ 682.876856] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.896074] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617892, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.902060] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.907208] env[70020]: DEBUG nova.network.neutron [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Successfully created port: 3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.980492] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617890, 'name': CreateSnapshot_Task, 'duration_secs': 0.635125} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.980879] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 682.981638] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adfca65-7926-4c1f-bac0-24001bcc3889 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.048245] env[70020]: DEBUG nova.scheduler.client.report [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.271813] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Releasing lock "refresh_cache-c4335d00-29a3-4f2e-b826-1a78ef02e0bf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.271813] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Instance network_info: |[{"id": "0e881a0f-2c6d-43ba-ad4e-bc9ce172d429", "address": "fa:16:3e:33:10:c8", "network": {"id": "78d73c6d-9612-41c9-8f3d-c15281c3ae04", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1821259165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba9aef977894366a098bf5aa627a1ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b0fa7a2-ebd9-4788-8904-7bf250ce466c", "external-id": "nsx-vlan-transportzone-669", "segmentation_id": 669, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e881a0f-2c", "ovs_interfaceid": "0e881a0f-2c6d-43ba-ad4e-bc9ce172d429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 683.271990] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 
tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:10:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b0fa7a2-ebd9-4788-8904-7bf250ce466c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e881a0f-2c6d-43ba-ad4e-bc9ce172d429', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.283186] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Creating folder: Project (4ba9aef977894366a098bf5aa627a1ef). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.283569] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7288817-6aa2-496e-8873-892cd3cc8526 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.297860] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Created folder: Project (4ba9aef977894366a098bf5aa627a1ef) in parent group-v721521. [ 683.298190] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Creating folder: Instances. Parent ref: group-v721605. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.298390] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e533dd8-f725-47a3-abc6-ae3c054c9443 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.309506] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Created folder: Instances in parent group-v721605. [ 683.309737] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.309994] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.310227] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c5ee0b9-7c8c-4e27-b987-71ea3ed2208b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.332168] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.332168] env[70020]: value = "task-3617895" [ 683.332168] env[70020]: _type = "Task" [ 683.332168] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.341600] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617895, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.392779] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617892, 'name': ReconfigVM_Task, 'duration_secs': 0.306169} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.393793] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764/7cf7f0a9-8240-4e78-b5d4-b1eb1da60764.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.394670] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69e93ade-2d32-45cd-8e5f-17f53412909d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.399064] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 683.407919] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 683.407919] env[70020]: value = "task-3617896" [ 683.407919] env[70020]: _type = "Task" [ 683.407919] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.420725] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617896, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.437208] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 683.437739] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.437827] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 683.437991] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.438227] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 683.438480] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 683.438871] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 683.440110] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 683.440110] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 683.440110] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 683.440110] env[70020]: DEBUG nova.virt.hardware [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 683.441440] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbe7077-e925-41d7-a013-e9617715df56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.451905] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c680541c-9f2a-4ec0-945c-16a32fac92fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.501859] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 683.502501] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dad62859-a6a7-47e5-9b87-c3879ed0782a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.514103] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 683.514103] env[70020]: value = "task-3617897" [ 683.514103] env[70020]: _type = "Task" [ 683.514103] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.525182] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617897, 'name': CloneVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.557998] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.561483] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.675s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.562204] env[70020]: INFO nova.compute.claims [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.587444] env[70020]: INFO nova.scheduler.client.report [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Deleted allocations for instance b0b825d4-534d-4d54-a0c4-b9e507726c47 [ 683.845565] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617895, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.919709] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617896, 'name': Rename_Task, 'duration_secs': 0.164453} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.920066] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 683.920323] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccfc5dfa-0bcd-4d65-85d1-46cc56e83383 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.929633] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 683.929633] env[70020]: value = "task-3617898" [ 683.929633] env[70020]: _type = "Task" [ 683.929633] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.938179] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617898, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.028336] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617897, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.097123] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5aba528f-b966-4595-8002-007d724299f6 tempest-ServersAdmin275Test-2116430007 tempest-ServersAdmin275Test-2116430007-project-member] Lock "b0b825d4-534d-4d54-a0c4-b9e507726c47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.693s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.346128] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617895, 'name': CreateVM_Task, 'duration_secs': 0.828198} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.346128] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 684.347045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.347380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.347826] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 684.348234] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9abab0ca-2691-45c3-bcd7-7d7d05d4f0f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.354789] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 684.354789] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527f05f0-388e-db09-b261-98a42290e6e1" [ 684.354789] env[70020]: _type = "Task" [ 684.354789] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.364662] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527f05f0-388e-db09-b261-98a42290e6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.444280] env[70020]: DEBUG oslo_vmware.api [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617898, 'name': PowerOnVM_Task, 'duration_secs': 0.482485} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.444771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 684.448429] env[70020]: INFO nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Took 9.00 seconds to spawn the instance on the hypervisor. [ 684.448429] env[70020]: DEBUG nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 684.448429] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32082410-11f9-42a5-a143-4b65b4b80d2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.591041] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617897, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.866378] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527f05f0-388e-db09-b261-98a42290e6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.033964} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.869101] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.869352] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.869578] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.869717] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.869923] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.870384] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bfde852-aad6-429d-b255-f7d79b0effff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.885060] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.885250] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.885984] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f35dfee-2ed1-45ba-b16b-24e31be45542 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.892735] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 684.892735] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f9e5dd-c3fb-abb1-b4e5-6ee32aec214d" [ 684.892735] env[70020]: _type = "Task" [ 684.892735] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.904017] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f9e5dd-c3fb-abb1-b4e5-6ee32aec214d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.969917] env[70020]: INFO nova.compute.manager [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Took 37.02 seconds to build instance. [ 685.028766] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617897, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.063839] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbf4076-3996-4bcd-bd94-5244f0806edb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.072737] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc89c677-baad-4987-8344-adbe37d7e1cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.111017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127c3c55-3e92-4a0d-8b8a-2dfa5a089ebf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.119773] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fb2766-4e1d-410e-b715-52708e148256 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.135846] env[70020]: DEBUG nova.compute.provider_tree [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.360576] env[70020]: DEBUG nova.network.neutron [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Successfully updated port: 3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 685.369864] env[70020]: DEBUG nova.compute.manager [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Received event network-changed-0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.370090] env[70020]: DEBUG nova.compute.manager [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Refreshing instance network info cache due to event network-changed-0e881a0f-2c6d-43ba-ad4e-bc9ce172d429. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 685.370362] env[70020]: DEBUG oslo_concurrency.lockutils [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] Acquiring lock "refresh_cache-c4335d00-29a3-4f2e-b826-1a78ef02e0bf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.370555] env[70020]: DEBUG oslo_concurrency.lockutils [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] Acquired lock "refresh_cache-c4335d00-29a3-4f2e-b826-1a78ef02e0bf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.370999] env[70020]: DEBUG nova.network.neutron [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Refreshing network info cache for port 0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.406062] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f9e5dd-c3fb-abb1-b4e5-6ee32aec214d, 'name': SearchDatastore_Task, 'duration_secs': 0.025609} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.406889] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a76418a-7d80-4cd6-8f94-3a4faab106ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.412831] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 685.412831] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520689b2-eb22-c935-f80a-69424e8945dd" [ 685.412831] env[70020]: _type = "Task" [ 685.412831] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.422240] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520689b2-eb22-c935-f80a-69424e8945dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.450643] env[70020]: DEBUG nova.compute.manager [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-changed-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.450865] env[70020]: DEBUG nova.compute.manager [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing instance network info cache due to event network-changed-4e709a63-45c3-48e8-8762-26e149c61266. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 685.451161] env[70020]: DEBUG oslo_concurrency.lockutils [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] Acquiring lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.451345] env[70020]: DEBUG oslo_concurrency.lockutils [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] Acquired lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.451541] env[70020]: DEBUG nova.network.neutron [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.474557] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a0aeb12-7aa0-418d-94e3-66b632cefbcb tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.007s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.527463] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617897, 'name': CloneVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.639992] env[70020]: DEBUG nova.scheduler.client.report [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 685.863560] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "refresh_cache-f53cb08c-0939-4cb1-8476-8b289d6a1b05" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.864536] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquired lock "refresh_cache-f53cb08c-0939-4cb1-8476-8b289d6a1b05" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.866772] env[70020]: DEBUG nova.network.neutron [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.917756] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "0add6226-3b90-4991-8f2b-81c35e72a7df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.917988] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.932281] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520689b2-eb22-c935-f80a-69424e8945dd, 'name': SearchDatastore_Task, 'duration_secs': 0.036089} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.932525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.932765] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c4335d00-29a3-4f2e-b826-1a78ef02e0bf/c4335d00-29a3-4f2e-b826-1a78ef02e0bf.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.933013] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-943002b6-bb0d-4423-af6a-815a662a5fe1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.940294] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 685.940294] env[70020]: value = "task-3617899" [ 685.940294] env[70020]: _type = "Task" [ 685.940294] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.949317] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.979675] env[70020]: DEBUG nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 685.993311] env[70020]: DEBUG nova.compute.manager [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.996483] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c50842-8f2d-45a2-8b61-5683ccf45122 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.030169] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617897, 'name': CloneVM_Task, 'duration_secs': 2.097821} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.031782] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Created linked-clone VM from snapshot [ 686.032605] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87f5d0e-b2be-4d3e-9dac-660db21441b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.042141] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Uploading image b4a35e1b-e812-4975-a414-1bbb1e714c78 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 686.069377] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 686.069377] env[70020]: value = "vm-721608" [ 686.069377] env[70020]: _type = "VirtualMachine" [ 686.069377] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 686.069377] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c490c3c2-9b4d-4cba-9a68-d790496c691d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.077435] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lease: (returnval){ [ 686.077435] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cb3acf-d11e-ba9b-1b2c-af133d0c25fb" [ 686.077435] env[70020]: _type = "HttpNfcLease" [ 686.077435] env[70020]: } obtained for exporting VM: (result){ [ 686.077435] env[70020]: value = "vm-721608" [ 686.077435] env[70020]: _type = "VirtualMachine" [ 686.077435] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 686.077713] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the lease: (returnval){ [ 686.077713] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cb3acf-d11e-ba9b-1b2c-af133d0c25fb" [ 686.077713] env[70020]: _type = "HttpNfcLease" [ 686.077713] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 686.086131] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 686.086131] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cb3acf-d11e-ba9b-1b2c-af133d0c25fb" [ 686.086131] env[70020]: _type = "HttpNfcLease" [ 686.086131] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 686.144700] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.145384] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 686.151523] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 25.582s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.262088] env[70020]: DEBUG nova.network.neutron [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updated VIF entry in instance network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.262504] env[70020]: DEBUG nova.network.neutron [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.302251] env[70020]: DEBUG nova.network.neutron [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Updated VIF entry in instance network info cache for port 0e881a0f-2c6d-43ba-ad4e-bc9ce172d429. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.302677] env[70020]: DEBUG nova.network.neutron [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Updating instance_info_cache with network_info: [{"id": "0e881a0f-2c6d-43ba-ad4e-bc9ce172d429", "address": "fa:16:3e:33:10:c8", "network": {"id": "78d73c6d-9612-41c9-8f3d-c15281c3ae04", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1821259165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba9aef977894366a098bf5aa627a1ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b0fa7a2-ebd9-4788-8904-7bf250ce466c", "external-id": "nsx-vlan-transportzone-669", "segmentation_id": 669, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e881a0f-2c", "ovs_interfaceid": "0e881a0f-2c6d-43ba-ad4e-bc9ce172d429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.432786] env[70020]: DEBUG nova.network.neutron [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.454617] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617899, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.509658] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.511428] env[70020]: INFO nova.compute.manager [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] instance snapshotting [ 686.514993] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3ecbef-f6c6-4e2f-8c7f-30c8756fe9e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.547512] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd8da5e-e487-4983-9a29-3d73ea27ac84 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.586058] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 686.586058] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cb3acf-d11e-ba9b-1b2c-af133d0c25fb" [ 686.586058] env[70020]: _type = "HttpNfcLease" [ 686.586058] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 686.586382] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 686.586382] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cb3acf-d11e-ba9b-1b2c-af133d0c25fb" [ 686.586382] env[70020]: _type = "HttpNfcLease" [ 686.586382] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 686.587235] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a84110-193c-4e8d-a565-6afbc254a3e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.597446] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523050f0-3f41-84ce-0717-72c6cf43ac46/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 686.597673] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523050f0-3f41-84ce-0717-72c6cf43ac46/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 686.656531] env[70020]: DEBUG nova.compute.utils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 686.663096] env[70020]: INFO nova.compute.claims [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.666467] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 686.666644] env[70020]: DEBUG nova.network.neutron [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 686.728269] env[70020]: DEBUG nova.policy [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0e22e21d3684201883adc3617ddee72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3a2dc07c1d447ea81ca142d80ab4210', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 686.732611] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6b7688c3-7dca-4d77-bceb-da13f3e164dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.770965] env[70020]: DEBUG oslo_concurrency.lockutils [req-677d41a0-466a-4967-a891-38036bf09207 req-39f3a22e-a13b-4830-be77-10d1df35606e service nova] Releasing lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.807349] env[70020]: DEBUG oslo_concurrency.lockutils [req-1bcff5b1-6f12-4325-b552-1ffc6ff31d94 req-c5949705-1b23-4b36-9bdf-e04b88bc0cf3 service nova] Releasing lock "refresh_cache-c4335d00-29a3-4f2e-b826-1a78ef02e0bf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.813304] env[70020]: DEBUG nova.network.neutron [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Updating instance_info_cache with network_info: [{"id": "3b22879c-7b2b-4ab9-9749-21a544891657", "address": "fa:16:3e:41:07:89", "network": {"id": 
"6e3d7438-b94d-4438-b4e5-fde03770d05d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-965044620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37492ec68df9439e860aaacce6f58bcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b22879c-7b", "ovs_interfaceid": "3b22879c-7b2b-4ab9-9749-21a544891657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.953398] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648678} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.956715] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c4335d00-29a3-4f2e-b826-1a78ef02e0bf/c4335d00-29a3-4f2e-b826-1a78ef02e0bf.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.956715] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.957049] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64e15fe9-aacb-4f54-97d5-e422bbef39e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.969811] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 686.969811] env[70020]: value = "task-3617901" [ 686.969811] env[70020]: _type = "Task" [ 686.969811] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.982244] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617901, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.065221] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 687.065636] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dadae7db-254b-4d02-b967-37030bb08afd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.075896] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 687.075896] env[70020]: value = "task-3617902" [ 687.075896] env[70020]: _type = "Task" [ 687.075896] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.086196] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617902, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.091751] env[70020]: DEBUG nova.network.neutron [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Successfully created port: 2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.172415] env[70020]: INFO nova.compute.resource_tracker [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating resource usage from migration 2d171af4-44c5-498a-a2f3-345479067b8a [ 687.178952] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 687.316768] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Releasing lock "refresh_cache-f53cb08c-0939-4cb1-8476-8b289d6a1b05" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.316768] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Instance network_info: |[{"id": "3b22879c-7b2b-4ab9-9749-21a544891657", "address": "fa:16:3e:41:07:89", "network": {"id": "6e3d7438-b94d-4438-b4e5-fde03770d05d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-965044620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37492ec68df9439e860aaacce6f58bcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b22879c-7b", "ovs_interfaceid": "3b22879c-7b2b-4ab9-9749-21a544891657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 687.317405] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:07:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b22879c-7b2b-4ab9-9749-21a544891657', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.327409] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Creating folder: Project (37492ec68df9439e860aaacce6f58bcf). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.328591] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-075dc697-aac5-4e91-84d7-b3f490e1f53f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.349976] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Created folder: Project (37492ec68df9439e860aaacce6f58bcf) in parent group-v721521. [ 687.350924] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Creating folder: Instances. Parent ref: group-v721609. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.350924] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e3edccc-854f-4ad0-90b2-ab2ab39ff698 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.369018] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Created folder: Instances in parent group-v721609. [ 687.369018] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.369018] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.369018] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f218751e-79a1-4968-83d5-43f0b59618f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.395381] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.395381] env[70020]: value = "task-3617905" [ 687.395381] env[70020]: _type = "Task" [ 687.395381] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.410985] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617905, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.486549] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617901, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072413} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.489864] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 687.491611] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065ad356-9d13-485b-8002-bd747e8d27a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.528164] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] c4335d00-29a3-4f2e-b826-1a78ef02e0bf/c4335d00-29a3-4f2e-b826-1a78ef02e0bf.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 687.532067] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b38851e-5a4a-43ab-b792-ed2c8d4aa7a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.555007] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 687.555007] env[70020]: value = "task-3617906" [ 687.555007] env[70020]: _type = "Task" [ 687.555007] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.570786] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617906, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.590749] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617902, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.896387] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563eb756-4d30-4c8b-ae84-9e0e5927d664 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.912144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ebae4e-4f22-401d-ba22-4e89f5e48fb9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.915891] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617905, 'name': CreateVM_Task, 'duration_secs': 0.445656} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.916138] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 687.917715] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.917796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.920050] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 687.920396] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be7a8a8f-d974-473d-965c-017abbd3f390 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.959880] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a452fec7-aa42-4c2a-b574-34ce8b635eb4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.964136] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 687.964136] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526ef874-12bc-c934-1f92-19dc0e04aeba" [ 687.964136] env[70020]: _type = "Task" [ 687.964136] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.973032] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea588fc-f10f-4e1d-9f06-d4b3ea2520de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.986728] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526ef874-12bc-c934-1f92-19dc0e04aeba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.003128] env[70020]: DEBUG nova.compute.provider_tree [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.069041] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617906, 'name': ReconfigVM_Task, 'duration_secs': 0.43351} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.069316] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Reconfigured VM instance instance-00000019 to attach disk [datastore1] c4335d00-29a3-4f2e-b826-1a78ef02e0bf/c4335d00-29a3-4f2e-b826-1a78ef02e0bf.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 688.072229] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-281788f6-371d-46ae-bca2-2a4dacbf37fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.078522] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 688.078522] env[70020]: value = "task-3617907" [ 688.078522] env[70020]: _type = "Task" [ 688.078522] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.091109] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617902, 'name': CreateSnapshot_Task, 'duration_secs': 0.957313} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.099505] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 688.101261] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617907, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.102210] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79db7d80-0e83-49e4-a510-bad52648858e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.192168] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 688.231917] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 688.231917] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.232047] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 688.232156] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.232370] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 688.232519] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 688.232797] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 
tempest-ServersAdminTestJSON-1183835690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 688.233024] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 688.233222] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 688.233419] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 688.233627] env[70020]: DEBUG nova.virt.hardware [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 688.234550] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95747b64-3bc1-47c2-874e-c3aca5a270e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.243367] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4489d9-1a1a-4951-a894-b3c36e0d2576 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.474899] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Received event network-vif-plugged-3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.475179] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquiring lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.475475] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.475653] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.475933] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] No waiting events found dispatching network-vif-plugged-3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.476236] env[70020]: WARNING nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Received unexpected event network-vif-plugged-3b22879c-7b2b-4ab9-9749-21a544891657 for instance with vm_state building and task_state spawning. [ 688.476515] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Received event network-changed-3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.476730] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Refreshing instance network info cache due to event network-changed-3b22879c-7b2b-4ab9-9749-21a544891657. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 688.476934] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquiring lock "refresh_cache-f53cb08c-0939-4cb1-8476-8b289d6a1b05" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.477090] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquired lock "refresh_cache-f53cb08c-0939-4cb1-8476-8b289d6a1b05" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.477335] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Refreshing network info cache for port 3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 688.483164] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526ef874-12bc-c934-1f92-19dc0e04aeba, 'name': SearchDatastore_Task, 'duration_secs': 0.020175} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.483915] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.484274] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.484656] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.486148] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.486148] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.486148] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d776126-900d-4e19-a94c-0e252a709c7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.498070] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.498330] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 688.499026] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-448ec868-fe3d-45f0-b5ab-730feab96705 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.507414] env[70020]: DEBUG nova.scheduler.client.report [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.514264] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 688.514264] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524b89f3-ac12-7b0c-ffcd-c4c70edd0381" [ 688.514264] env[70020]: _type = "Task" [ 688.514264] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.523217] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524b89f3-ac12-7b0c-ffcd-c4c70edd0381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.593627] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617907, 'name': Rename_Task, 'duration_secs': 0.284016} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.594319] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.594669] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9aef6c2a-e5e1-413b-a8b3-35a77d892a43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.603476] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 688.603476] env[70020]: value = "task-3617908" [ 688.603476] env[70020]: _type = "Task" [ 688.603476] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.626940] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 688.627458] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.628189] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-90aabf45-d806-4c2b-a464-d87b47d2f347 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.640031] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 688.640031] env[70020]: value = "task-3617909" [ 688.640031] env[70020]: _type = "Task" [ 688.640031] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.651202] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617909, 'name': CloneVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.918798] env[70020]: DEBUG nova.network.neutron [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Successfully updated port: 2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.936269] env[70020]: DEBUG nova.compute.manager [req-a701ba1e-603b-4fe2-a6a7-f530947a8881 req-cbbbdd84-17b3-47e2-a84f-1c8b95ce5ca8 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Received event network-vif-plugged-2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.936389] env[70020]: DEBUG oslo_concurrency.lockutils [req-a701ba1e-603b-4fe2-a6a7-f530947a8881 req-cbbbdd84-17b3-47e2-a84f-1c8b95ce5ca8 service nova] Acquiring lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.936665] env[70020]: DEBUG oslo_concurrency.lockutils [req-a701ba1e-603b-4fe2-a6a7-f530947a8881 req-cbbbdd84-17b3-47e2-a84f-1c8b95ce5ca8 service nova] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.936954] env[70020]: DEBUG oslo_concurrency.lockutils [req-a701ba1e-603b-4fe2-a6a7-f530947a8881 req-cbbbdd84-17b3-47e2-a84f-1c8b95ce5ca8 service nova] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.937322] env[70020]: DEBUG nova.compute.manager [req-a701ba1e-603b-4fe2-a6a7-f530947a8881 req-cbbbdd84-17b3-47e2-a84f-1c8b95ce5ca8 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] No waiting events found dispatching network-vif-plugged-2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.937537] env[70020]: WARNING nova.compute.manager [req-a701ba1e-603b-4fe2-a6a7-f530947a8881 req-cbbbdd84-17b3-47e2-a84f-1c8b95ce5ca8 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Received unexpected event network-vif-plugged-2e160e96-59d2-4391-adfe-0ebb379762cd for instance with vm_state building and task_state spawning. 
[ 689.014464] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.863s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.014671] env[70020]: INFO nova.compute.manager [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Migrating [ 689.014896] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.015038] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "compute-rpcapi-router" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.019538] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.327s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.022071] env[70020]: INFO nova.compute.claims [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 689.031900] env[70020]: INFO nova.compute.rpcapi [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 689.032773] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "compute-rpcapi-router" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.061907] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524b89f3-ac12-7b0c-ffcd-c4c70edd0381, 'name': SearchDatastore_Task, 'duration_secs': 0.017614} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.064346] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-306b8198-8808-4b22-8ec9-34ffe8c62a57 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.073431] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 689.073431] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522b38ce-2c0b-18f7-7fc5-454adbe62b53" [ 689.073431] env[70020]: _type = "Task" [ 689.073431] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.092110] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522b38ce-2c0b-18f7-7fc5-454adbe62b53, 'name': SearchDatastore_Task, 'duration_secs': 0.014914} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.092392] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.092888] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f53cb08c-0939-4cb1-8476-8b289d6a1b05/f53cb08c-0939-4cb1-8476-8b289d6a1b05.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.093529] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bef13072-dfaf-4f3c-83fb-2c086a4b15e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.102836] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 689.102836] env[70020]: value = "task-3617910" [ 689.102836] env[70020]: _type = "Task" [ 689.102836] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.124421] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617910, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.124667] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617908, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.151479] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617909, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.323152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.323152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.323152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.323152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.323513] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.324950] env[70020]: INFO nova.compute.manager [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Terminating instance [ 689.356175] env[70020]: DEBUG nova.network.neutron 
[req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Updated VIF entry in instance network info cache for port 3b22879c-7b2b-4ab9-9749-21a544891657. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 689.356630] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Updating instance_info_cache with network_info: [{"id": "3b22879c-7b2b-4ab9-9749-21a544891657", "address": "fa:16:3e:41:07:89", "network": {"id": "6e3d7438-b94d-4438-b4e5-fde03770d05d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-965044620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37492ec68df9439e860aaacce6f58bcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b22879c-7b", "ovs_interfaceid": "3b22879c-7b2b-4ab9-9749-21a544891657", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.422453] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "refresh_cache-0caa6acd-29d4-43ee-8b32-5149462dfc1c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.423195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "refresh_cache-0caa6acd-29d4-43ee-8b32-5149462dfc1c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.423738] env[70020]: DEBUG nova.network.neutron [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.563853] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.564197] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired 
lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.564294] env[70020]: DEBUG nova.network.neutron [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.620888] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617910, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.629461] env[70020]: DEBUG oslo_vmware.api [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617908, 'name': PowerOnVM_Task, 'duration_secs': 0.77685} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.629831] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.629940] env[70020]: INFO nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Took 8.88 seconds to spawn the instance on the hypervisor. [ 689.630257] env[70020]: DEBUG nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.631470] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139de354-8f19-4cf9-9a4b-ea0444b2947d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.653565] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617909, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.833793] env[70020]: DEBUG nova.compute.manager [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 689.834116] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 689.835039] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab30a45-94ce-44d9-906a-f9f8a47e01e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.844807] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 689.845087] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-151d6bdd-0d05-4b28-9d1b-5dafd6aba6b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.855766] env[70020]: DEBUG oslo_vmware.api [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 689.855766] env[70020]: value = "task-3617911" [ 689.855766] env[70020]: _type = "Task" [ 689.855766] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.859921] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Releasing lock "refresh_cache-f53cb08c-0939-4cb1-8476-8b289d6a1b05" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.860252] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-changed-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.860443] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing instance network info cache due to event network-changed-4e709a63-45c3-48e8-8762-26e149c61266. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 689.860803] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquiring lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.861021] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquired lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.861280] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.870402] env[70020]: DEBUG oslo_vmware.api [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.959301] env[70020]: DEBUG nova.network.neutron [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.115995] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607592} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.116672] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f53cb08c-0939-4cb1-8476-8b289d6a1b05/f53cb08c-0939-4cb1-8476-8b289d6a1b05.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.116996] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.117344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56e632e2-1d77-421a-8764-cc243f6f707d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.122855] env[70020]: DEBUG nova.network.neutron [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Updating instance_info_cache with network_info: [{"id": "2e160e96-59d2-4391-adfe-0ebb379762cd", "address": "fa:16:3e:c4:78:cf", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e160e96-59", "ovs_interfaceid": "2e160e96-59d2-4391-adfe-0ebb379762cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.133023] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 690.133023] env[70020]: value = "task-3617912" [ 690.133023] env[70020]: _type = "Task" [ 690.133023] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.145513] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617912, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.163142] env[70020]: INFO nova.compute.manager [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Took 37.64 seconds to build instance. [ 690.169816] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617909, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.369709] env[70020]: DEBUG oslo_vmware.api [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617911, 'name': PowerOffVM_Task, 'duration_secs': 0.22685} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.372212] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 690.372394] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 690.378023] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8501b958-ac86-49f8-b292-0a41912739ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.469257] env[70020]: DEBUG nova.network.neutron [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance_info_cache with network_info: [{"id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "address": "fa:16:3e:a9:91:ab", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d9f41a-97", "ovs_interfaceid": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.630157] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updated VIF entry in instance network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.630157] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.630536] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "refresh_cache-0caa6acd-29d4-43ee-8b32-5149462dfc1c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.630536] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Instance network_info: |[{"id": "2e160e96-59d2-4391-adfe-0ebb379762cd", "address": "fa:16:3e:c4:78:cf", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e160e96-59", "ovs_interfaceid": "2e160e96-59d2-4391-adfe-0ebb379762cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 690.631308] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:78:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e160e96-59d2-4391-adfe-0ebb379762cd', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.640474] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 690.644086] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.648617] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5f9b7c1-185d-4f17-bb82-a4df67774f50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.671432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfd6fbd-cf49-4d0b-b1fa-e96757b772a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.676169] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2282cce1-55ac-4c4f-9b3e-4d6732717da4 tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.169s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.681925] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077495} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.688028] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 690.688165] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617909, 'name': CloneVM_Task, 'duration_secs': 1.857237} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.690780] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a244fde8-548c-425f-b979-ba8f1a63e599 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.693570] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Created linked-clone VM from snapshot [ 690.694301] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.694301] env[70020]: value = "task-3617914" [ 690.694301] env[70020]: _type = "Task" [ 690.694301] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.697109] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa38f56-8025-4778-aa97-97c3cc1710a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.705340] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e24620-a57c-4350-ac32-8ecc2b44c5ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.727310] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] f53cb08c-0939-4cb1-8476-8b289d6a1b05/f53cb08c-0939-4cb1-8476-8b289d6a1b05.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 690.732922] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aefd5122-30c9-44a3-8a17-aa0a3d294298 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.746923] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Uploading image e5e3dbec-e876-4f4d-a3ae-25f8fe219632 {{(pid=70020) upload_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 690.777924] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617914, 'name': CreateVM_Task} progress is 15%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.779164] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 690.782633] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c32600-5468-46c3-9b11-1e06958d746d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.784451] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c9269118-2c2c-426e-b8ba-89fc0bd76b94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.790315] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 690.790315] env[70020]: value = "task-3617915" [ 690.790315] env[70020]: _type = "Task" [ 690.790315] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.801543] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08efc776-2662-406f-a11a-31502110c3e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.805671] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 690.805671] env[70020]: value = "task-3617916" [ 690.805671] env[70020]: _type = "Task" [ 690.805671] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.810375] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617915, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.822476] env[70020]: DEBUG nova.compute.provider_tree [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.829632] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617916, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.928062] env[70020]: DEBUG nova.compute.manager [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Received event network-changed-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 690.928316] env[70020]: DEBUG nova.compute.manager [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Refreshing instance network info cache due to event network-changed-b2ae7c4e-fcb4-4d62-9ff1-82de773af513. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 690.928719] env[70020]: DEBUG oslo_concurrency.lockutils [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] Acquiring lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.928942] env[70020]: DEBUG oslo_concurrency.lockutils [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] Acquired lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.929199] env[70020]: DEBUG nova.network.neutron [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Refreshing network info cache for port b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.973303] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.141731] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Releasing lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.143450] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Received event network-changed-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.143450] env[70020]: DEBUG nova.compute.manager [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Refreshing instance network info cache due to event network-changed-b2ae7c4e-fcb4-4d62-9ff1-82de773af513. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 691.143450] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquiring lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.188851] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 691.210021] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 691.210021] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 691.210021] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleting the datastore file [datastore1] 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 691.212295] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39ce2f4c-7e89-42e3-96b7-106d03aea932 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.214785] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617914, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.222017] env[70020]: DEBUG oslo_vmware.api [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for the task: (returnval){ [ 691.222017] env[70020]: value = "task-3617917" [ 691.222017] env[70020]: _type = "Task" [ 691.222017] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.233889] env[70020]: DEBUG oslo_vmware.api [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617917, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.294659] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.294659] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.294659] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.294659] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.294864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.300103] env[70020]: INFO nova.compute.manager [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Terminating instance [ 691.309979] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617915, 'name': ReconfigVM_Task, 'duration_secs': 0.494125} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.310717] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Reconfigured VM instance instance-0000001a to attach disk [datastore1] f53cb08c-0939-4cb1-8476-8b289d6a1b05/f53cb08c-0939-4cb1-8476-8b289d6a1b05.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.314947] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87474d8c-a240-438f-b070-16989c4a2b16 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.323300] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617916, 'name': Destroy_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.324722] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 691.324722] env[70020]: value = "task-3617918" [ 691.324722] env[70020]: _type = "Task" [ 691.324722] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.326101] env[70020]: DEBUG nova.scheduler.client.report [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.341592] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617918, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.370530] env[70020]: DEBUG nova.compute.manager [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Received event network-changed-2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.370687] env[70020]: DEBUG nova.compute.manager [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Refreshing instance network info cache due to event network-changed-2e160e96-59d2-4391-adfe-0ebb379762cd. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 691.370933] env[70020]: DEBUG oslo_concurrency.lockutils [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] Acquiring lock "refresh_cache-0caa6acd-29d4-43ee-8b32-5149462dfc1c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.371152] env[70020]: DEBUG oslo_concurrency.lockutils [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] Acquired lock "refresh_cache-0caa6acd-29d4-43ee-8b32-5149462dfc1c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.371483] env[70020]: DEBUG nova.network.neutron [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Refreshing network info cache for port 2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.691310] env[70020]: DEBUG nova.network.neutron [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updated VIF entry in instance network info cache for port b2ae7c4e-fcb4-4d62-9ff1-82de773af513. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 691.691703] env[70020]: DEBUG nova.network.neutron [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [{"id": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "address": "fa:16:3e:33:a0:4d", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ae7c4e-fc", "ovs_interfaceid": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.714101] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617914, 'name': CreateVM_Task, 'duration_secs': 0.544223} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.715157] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.715418] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.716500] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.716731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.720142] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 691.720142] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be650bb9-d732-46c2-9467-44f08dce2e0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.730761] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 691.730761] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52569474-3234-0623-cd8e-7f6a35cce0b2" [ 691.730761] env[70020]: _type = "Task" [ 691.730761] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.741169] env[70020]: DEBUG oslo_vmware.api [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Task: {'id': task-3617917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.398739} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.745122] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 691.745684] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 691.745761] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 691.745932] env[70020]: INFO nova.compute.manager [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Took 1.91 seconds to destroy the instance on the hypervisor. [ 691.746243] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 691.746456] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52569474-3234-0623-cd8e-7f6a35cce0b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.746838] env[70020]: DEBUG nova.compute.manager [-] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 691.746941] env[70020]: DEBUG nova.network.neutron [-] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 691.815335] env[70020]: DEBUG nova.compute.manager [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 691.815335] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 691.815695] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6474e2-b598-4585-982d-b600e81ea20b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.833309] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 691.837322] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b20c2490-974d-428c-b953-15dd02907a4a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.838852] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617916, 'name': Destroy_Task, 'duration_secs': 0.61293} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.839591] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.823s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.840164] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 691.842734] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Destroyed the VM [ 691.843933] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 691.843933] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.425s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.845071] env[70020]: INFO nova.compute.claims [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.848479] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a1f1ad33-d2ee-4871-899b-4e5c765beb23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.855543] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617918, 'name': Rename_Task, 'duration_secs': 0.337352} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.857053] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 691.857423] env[70020]: DEBUG oslo_vmware.api [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 691.857423] env[70020]: value = "task-3617919" [ 691.857423] env[70020]: _type = "Task" [ 691.857423] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.858909] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2575fbcb-8d65-405b-85b3-92fbf8ec5246 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.861074] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 691.861074] env[70020]: value = "task-3617920" [ 691.861074] env[70020]: _type = "Task" [ 691.861074] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.874667] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 691.874667] env[70020]: value = "task-3617921" [ 691.874667] env[70020]: _type = "Task" [ 691.874667] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.887939] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617920, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.888283] env[70020]: DEBUG oslo_vmware.api [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.898374] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617921, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.197174] env[70020]: DEBUG oslo_concurrency.lockutils [req-673c2376-db30-4b25-8f4b-1de47a4ff8bf req-f6fd6289-7d87-436f-b261-586054487ded service nova] Releasing lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.197642] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Acquired lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.197942] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Refreshing network info cache for port b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.244740] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52569474-3234-0623-cd8e-7f6a35cce0b2, 'name': SearchDatastore_Task, 'duration_secs': 0.038048} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.245493] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.245766] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.246098] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.246295] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.246538] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.246931] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b23a234a-d047-4f3b-9db4-ff7a0cb808fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.257744] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.258057] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.258774] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81868336-e3d9-495c-a7f9-61eb77c71bd4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.268537] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 692.268537] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ad0b2b-ca5a-7c7f-0ba4-5409317e6598" [ 692.268537] env[70020]: _type = "Task" [ 692.268537] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.279831] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ad0b2b-ca5a-7c7f-0ba4-5409317e6598, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.340422] env[70020]: DEBUG nova.network.neutron [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Updated VIF entry in instance network info cache for port 2e160e96-59d2-4391-adfe-0ebb379762cd. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.340831] env[70020]: DEBUG nova.network.neutron [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Updating instance_info_cache with network_info: [{"id": "2e160e96-59d2-4391-adfe-0ebb379762cd", "address": "fa:16:3e:c4:78:cf", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e160e96-59", "ovs_interfaceid": "2e160e96-59d2-4391-adfe-0ebb379762cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.344878] env[70020]: DEBUG nova.compute.utils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 692.352029] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 692.352029] env[70020]: DEBUG nova.network.neutron [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 692.374814] env[70020]: DEBUG oslo_vmware.api [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617919, 'name': PowerOffVM_Task, 'duration_secs': 0.253544} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.375748] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 692.376077] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 692.381128] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffe94cc8-60dc-4cd8-9b76-d42693c472aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.386274] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617920, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.393219] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617921, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.423838] env[70020]: DEBUG nova.policy [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b67375d5e85b4ba99d47120945bbf0f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd193f3ca7403a986d72f072590f4f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 692.456430] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 692.456723] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 692.456969] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 
tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Deleting the datastore file [datastore1] 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 692.457330] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d94b0a0c-d9f7-4b60-81a9-393c85afdcc0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.467741] env[70020]: DEBUG oslo_vmware.api [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 692.467741] env[70020]: value = "task-3617923" [ 692.467741] env[70020]: _type = "Task" [ 692.467741] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.480159] env[70020]: DEBUG oslo_vmware.api [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617923, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.490340] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23b17ae-bcc5-4a3c-92f9-96518713263e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.512708] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 692.541121] env[70020]: DEBUG nova.network.neutron [-] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.796040] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ad0b2b-ca5a-7c7f-0ba4-5409317e6598, 'name': SearchDatastore_Task, 'duration_secs': 0.014369} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.796817] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0f667c3-5dac-436f-ba54-d5bfd4b973ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.805950] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 692.805950] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bd6c1b-4da2-a03f-40a1-85abbd2126fd" [ 692.805950] env[70020]: _type = "Task" [ 692.805950] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.821801] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd6c1b-4da2-a03f-40a1-85abbd2126fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.844293] env[70020]: DEBUG oslo_concurrency.lockutils [req-5f38c140-7f10-4ad1-ac7e-1f1098071b4e req-77e004dc-ee0d-4442-9a8f-b98ab928b619 service nova] Releasing lock "refresh_cache-0caa6acd-29d4-43ee-8b32-5149462dfc1c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.849017] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 692.889053] env[70020]: DEBUG oslo_vmware.api [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617920, 'name': RemoveSnapshot_Task, 'duration_secs': 0.81572} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.898024] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 692.908642] env[70020]: DEBUG oslo_vmware.api [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617921, 'name': PowerOnVM_Task, 'duration_secs': 0.691921} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.911471] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 692.911826] env[70020]: INFO nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Took 9.51 seconds to spawn the instance on the hypervisor. 
[ 692.912157] env[70020]: DEBUG nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 692.914041] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1ac42f-a69c-40b9-8055-987c56684883 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.979244] env[70020]: DEBUG oslo_vmware.api [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3617923, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216499} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.979244] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 692.979244] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 692.979244] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 692.979476] env[70020]: INFO nova.compute.manager [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 692.979604] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 692.979737] env[70020]: DEBUG nova.compute.manager [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 692.979826] env[70020]: DEBUG nova.network.neutron [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 692.986419] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updated VIF entry in instance network info cache for port b2ae7c4e-fcb4-4d62-9ff1-82de773af513. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.986731] env[70020]: DEBUG nova.network.neutron [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [{"id": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "address": "fa:16:3e:33:a0:4d", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ae7c4e-fc", "ovs_interfaceid": "b2ae7c4e-fcb4-4d62-9ff1-82de773af513", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.019242] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 693.019541] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e595234-1160-47ef-a437-04fdd8943840 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.027985] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 693.027985] env[70020]: value = "task-3617924" [ 693.027985] env[70020]: _type = "Task" [ 693.027985] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.039221] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.042751] env[70020]: INFO nova.compute.manager [-] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Took 1.30 seconds to deallocate network for instance. [ 693.150388] env[70020]: DEBUG nova.network.neutron [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Successfully created port: dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 693.237135] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.237674] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.323450] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd6c1b-4da2-a03f-40a1-85abbd2126fd, 'name': SearchDatastore_Task, 'duration_secs': 0.0338} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.327181] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.327744] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0caa6acd-29d4-43ee-8b32-5149462dfc1c/0caa6acd-29d4-43ee-8b32-5149462dfc1c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.328592] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ee0344d-db11-4482-8867-bf754d151ec5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.339313] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 693.339313] env[70020]: value = "task-3617925" [ 693.339313] env[70020]: _type = "Task" [ 693.339313] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.350962] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.413779] env[70020]: WARNING nova.compute.manager [None req-d5536acf-40bb-48dd-9d7c-7e084a7ae9ec tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Image not found during snapshot: nova.exception.ImageNotFound: Image e5e3dbec-e876-4f4d-a3ae-25f8fe219632 could not be found. [ 693.441707] env[70020]: INFO nova.compute.manager [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Took 40.31 seconds to build instance. 
[ 693.494414] env[70020]: DEBUG oslo_concurrency.lockutils [req-c86c5ae7-d73f-4b3e-8606-36efe441eea9 req-65eb0579-6b0d-45a7-b521-e9932086b238 service nova] Releasing lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.511168] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265dcea8-c206-40c2-8309-29cdcf5ba46d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.523062] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2f3ef8-f933-4d5e-93b0-ea23a87820df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.557661] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.559192] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3901df0e-be50-48f5-a094-b5f03670d4e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.568423] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617924, 'name': PowerOffVM_Task, 'duration_secs': 0.315636} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.568423] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 693.568423] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 693.580809] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1f19b6-0146-444c-ab38-372f1f4c793c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.598670] env[70020]: DEBUG nova.compute.provider_tree [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.816376] env[70020]: DEBUG nova.network.neutron [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.850389] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617925, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.858888] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 693.890496] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 693.890741] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.890912] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 693.891662] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.891842] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 693.892015] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 693.892256] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 693.892408] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 693.892580] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Got 1 possible 
topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 693.892743] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 693.892964] env[70020]: DEBUG nova.virt.hardware [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 693.893902] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790d8a7e-6b8c-4835-b940-44ef4c0f0eff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.903198] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f40027d-52b0-49b6-9762-516da645ccc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.946056] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10eb92e6-791a-4740-83c8-cf7e73be4011 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.554s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.012843] env[70020]: DEBUG nova.compute.manager [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-changed-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.013094] env[70020]: DEBUG nova.compute.manager [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing instance network info cache due to event network-changed-4e709a63-45c3-48e8-8762-26e149c61266. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.013298] env[70020]: DEBUG oslo_concurrency.lockutils [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] Acquiring lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.013448] env[70020]: DEBUG oslo_concurrency.lockutils [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] Acquired lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.013650] env[70020]: DEBUG nova.network.neutron [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.032965] env[70020]: DEBUG nova.compute.manager [req-018ae63d-c2c7-404f-848f-465c555ce086 req-c67b743e-6a89-4016-84c5-c97a0ecfba82 service nova] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Received event network-vif-deleted-5107e297-9610-48b2-bcdf-2ec121611559 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.032965] env[70020]: DEBUG nova.compute.manager [req-018ae63d-c2c7-404f-848f-465c555ce086 req-c67b743e-6a89-4016-84c5-c97a0ecfba82 service nova] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Received event network-vif-deleted-b2ae7c4e-fcb4-4d62-9ff1-82de773af513 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.076391] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.076788] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.076788] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.077128] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.077207] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.077290] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.077490] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.077643] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.077806] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.077965] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.078154] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.084276] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba0efcb9-90aa-441b-b3f5-8a09dcbe46eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.103435] env[70020]: DEBUG nova.scheduler.client.report [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.106918] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 
tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 694.106918] env[70020]: value = "task-3617926" [ 694.106918] env[70020]: _type = "Task" [ 694.106918] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.118627] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617926, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.292965] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.293253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.293466] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.293651] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.297018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.297018] env[70020]: INFO nova.compute.manager [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Terminating instance [ 694.318975] env[70020]: INFO nova.compute.manager [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Took 1.34 seconds to deallocate network for instance. 
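The nova.virt.hardware entries above ("Flavor limits 0:0:0", "Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") record Nova enumerating every sockets*cores*threads combination that multiplies out to the flavor's vCPU count, bounded by the 65536 defaults, and then sorting the candidates by preference. A minimal illustrative sketch of that enumeration follows; it is not the actual _get_possible_cpu_topologies code, and the names used are hypothetical:

    from dataclasses import dataclass

    @dataclass(frozen=True)
    class CPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate every sockets*cores*threads factorisation of `vcpus` within
        the given maxima, mirroring the 'Build topologies for N vcpu(s)' and
        'Got N possible topologies' steps logged above."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(CPUTopology(sockets, cores, threads))
        return found

    # The single-vCPU m1.nano flavor yields exactly one result, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))  # [CPUTopology(sockets=1, cores=1, threads=1)]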
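The oslo_vmware.api entries above ("Waiting for the task: (returnval){ value = "task-3617926" ... } to complete", then "progress is 10%", and later "'duration_secs': 0.211012} completed successfully") show the driver polling a vCenter task until it reaches a terminal state. A simplified, self-contained sketch of such a polling loop is given below, assuming a caller-supplied `get_task_info` callable; this is an illustration of the pattern, not the oslo.vmware implementation:

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str        # e.g. "queued", "running", "success", "error"
        progress: int = 0
        error: str = ""

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
        """Poll `get_task_info(task_id)` until the task succeeds or fails,
        reporting progress the way the '_poll_task ... progress is N%'
        entries above do."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} ({info.state}) progress is {info.progress}%")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")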
[ 694.351608] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726462} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.351913] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0caa6acd-29d4-43ee-8b32-5149462dfc1c/0caa6acd-29d4-43ee-8b32-5149462dfc1c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.352196] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.352465] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-196969a1-f9d5-456f-99d4-a3b454572c3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.362012] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 694.362012] env[70020]: value = "task-3617927" [ 694.362012] env[70020]: _type = "Task" [ 694.362012] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.376460] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.448812] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 694.609124] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.609946] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 694.613192] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.033s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.613619] env[70020]: DEBUG nova.objects.instance [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'resources' on Instance uuid d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 694.633915] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617926, 'name': ReconfigVM_Task, 'duration_secs': 0.211012} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.634601] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 694.800624] env[70020]: DEBUG nova.compute.manager [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.800905] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.804426] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464342e5-14ee-4bf6-9e2e-326a1a5056f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.813542] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.813713] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6fcad21-cff4-4284-b3ec-4b6e8ff8df24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.822645] env[70020]: DEBUG oslo_vmware.api [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 694.822645] env[70020]: value = "task-3617928" [ 694.822645] env[70020]: _type = "Task" [ 694.822645] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.832827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.840029] env[70020]: DEBUG oslo_vmware.api [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.872663] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094613} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.872902] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.873612] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1a1733-f0a0-4dbd-9328-323568ef5ae8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.900442] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 0caa6acd-29d4-43ee-8b32-5149462dfc1c/0caa6acd-29d4-43ee-8b32-5149462dfc1c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.903565] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1990d1a-1264-40db-9426-b3ace2982fc7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.930894] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 694.930894] env[70020]: value = "task-3617929" [ 694.930894] env[70020]: _type = "Task" [ 694.930894] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.939646] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.975480] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.008397] env[70020]: DEBUG nova.network.neutron [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updated VIF entry in instance network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.008830] env[70020]: DEBUG nova.network.neutron [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.128348] env[70020]: DEBUG nova.compute.utils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 695.130488] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.130731] env[70020]: DEBUG nova.network.neutron [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.143537] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 695.143537] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.143757] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 695.145776] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.145776] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 695.145776] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 695.145776] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 695.145776] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 695.146116] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 695.146116] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 695.146116] env[70020]: DEBUG nova.virt.hardware [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 695.150852] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Reconfiguring VM instance instance-00000011 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 695.152041] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffa12f51-de95-4da0-ba6c-dceed46a982b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.177260] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 695.177260] env[70020]: value = "task-3617930" [ 695.177260] env[70020]: _type = "Task" [ 695.177260] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.182992] env[70020]: DEBUG nova.policy [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b543e081f574f1f85874775a734a0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e3eae740ef84ef88aef113ed4d6e57b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.193515] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617930, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.311241] env[70020]: DEBUG nova.network.neutron [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Successfully updated port: dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 695.337893] env[70020]: DEBUG oslo_vmware.api [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617928, 'name': PowerOffVM_Task, 'duration_secs': 0.319754} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.340907] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.341194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.341788] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f875948-99fa-42ea-b4a3-93a2992c1ec6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.460900] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617929, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.462619] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.462863] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.463342] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Deleting the datastore file [datastore1] c4335d00-29a3-4f2e-b826-1a78ef02e0bf {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.463587] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-182a5d8e-da77-426c-998d-7e8859583a78 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.478966] env[70020]: DEBUG oslo_vmware.api [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for the task: (returnval){ [ 695.478966] env[70020]: value = "task-3617932" [ 695.478966] env[70020]: _type = "Task" [ 695.478966] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.492989] env[70020]: DEBUG oslo_vmware.api [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.511673] env[70020]: DEBUG oslo_concurrency.lockutils [req-77b76988-0d3a-451e-817d-a3dfea648c92 req-a5262317-b60a-40e5-944b-ebf4654e246e service nova] Releasing lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.567980] env[70020]: DEBUG nova.network.neutron [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Successfully created port: 4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.635260] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 695.689160] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617930, 'name': ReconfigVM_Task, 'duration_secs': 0.302411} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.689160] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Reconfigured VM instance instance-00000011 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 695.690130] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfb7103-f8c2-47ac-b193-22f276e36c64 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.723888] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606/4b5750d4-98ec-4c70-b214-fad97060b606.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.727110] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.727400] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.727638] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.727863] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.728087] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.729661] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d19c142-7e83-474e-bccf-6cfb7fff05c1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.743958] env[70020]: INFO nova.compute.manager [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Terminating instance [ 695.753742] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 695.753742] env[70020]: value = "task-3617933" [ 695.753742] env[70020]: _type = "Task" [ 695.753742] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.764142] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617933, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.809059] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7b843e-575b-4139-b2c8-d28e2d4daf13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.815534] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "refresh_cache-1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.815676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "refresh_cache-1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.815822] env[70020]: DEBUG nova.network.neutron [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.823723] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66b0045-2881-4741-99e0-3a8add1bb123 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.857916] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbd169c-e38f-446e-985f-d1ac9f839fa6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.868243] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0f527f-a54b-43a0-9c7c-a83980d5a5f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.883959] env[70020]: DEBUG nova.compute.provider_tree [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.944392] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617929, 'name': ReconfigVM_Task, 'duration_secs': 0.573407} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.944392] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 0caa6acd-29d4-43ee-8b32-5149462dfc1c/0caa6acd-29d4-43ee-8b32-5149462dfc1c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.945357] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d93b7925-9ec3-449d-a70c-87a9a057e899 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.954362] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 695.954362] env[70020]: value = "task-3617934" [ 695.954362] env[70020]: _type = "Task" [ 695.954362] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.963973] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617934, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.988529] env[70020]: DEBUG oslo_vmware.api [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Task: {'id': task-3617932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.394236} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.988832] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.989037] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.989229] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.989378] env[70020]: INFO nova.compute.manager [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Took 1.19 seconds to destroy the instance on the hypervisor. 
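Throughout this section oslo_concurrency.lockutils brackets every critical section with matching "Acquiring lock ...", "acquired ... waited Ns" and ""released" ... held Ns" lines (the per-instance UUID locks, the "-events" locks, "compute_resources", and the "refresh_cache-..." locks). A rough in-process analogue of that bookkeeping is sketched below using plain threading rather than the real lockutils decorators, only to show where the waited/held figures come from:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def named_lock(name, caller):
        """Acquire a process-local lock by name and report wait/hold times in
        the same style as the lockutils entries above."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        start = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - start:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - held_from:.3f}s')

    # e.g. guarding a teardown the way do_terminate_instance is guarded above:
    with named_lock("c4335d00-29a3-4f2e-b826-1a78ef02e0bf", "do_terminate_instance"):
        pass  # the terminate work would run here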
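Earlier in this section the neutron event handler refreshed the cache for port 4e709a63-45c3-48e8-8762-26e149c61266 and stored a network_info list of VIF dictionaries (id, address, network.subnets[].ips, and so on). Assuming exactly the structure shown in that cache entry, a small helper like the one below could pull the fixed IPs for one port; the function is hypothetical and is written only to make the nesting of that payload concrete:

    def fixed_ips_for_port(network_info, port_id):
        """Walk a network_info list shaped like the instance_info_cache entry
        above and return the fixed IP addresses bound to `port_id`."""
        addresses = []
        for vif in network_info:
            if vif.get("id") != port_id:
                continue
            for subnet in vif.get("network", {}).get("subnets", []):
                addresses.extend(
                    ip["address"] for ip in subnet.get("ips", []) if ip.get("type") == "fixed"
                )
        return addresses

    # Applied to the cached entry above, this returns ["192.168.128.4"] for
    # port 4e709a63-45c3-48e8-8762-26e149c61266.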
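The scheduler report-client entries in this section repeatedly confirm that the inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d is unchanged (VCPU total 48, reserved 0, allocation_ratio 4.0; MEMORY_MB total 196590, reserved 512, ratio 1.0; DISK_GB total 400, reserved 0, ratio 1.0). Placement treats usable capacity per resource class as (total - reserved) * allocation_ratio, so the logged figures work out as in this short sketch:

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    def capacity(inv):
        """Usable capacity per resource class: (total - reserved) * allocation_ratio."""
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"] for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}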
[ 695.989621] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 695.989816] env[70020]: DEBUG nova.compute.manager [-] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.989888] env[70020]: DEBUG nova.network.neutron [-] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.252018] env[70020]: DEBUG nova.compute.manager [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 696.252317] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.253654] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ff82d6-bfef-4592-9312-79351d5bbfd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.268403] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 696.268655] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617933, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.268856] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d25aa04-cef5-4514-8cdc-47762219beae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.276986] env[70020]: DEBUG oslo_vmware.api [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 696.276986] env[70020]: value = "task-3617935" [ 696.276986] env[70020]: _type = "Task" [ 696.276986] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.286038] env[70020]: DEBUG oslo_vmware.api [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617935, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.317018] env[70020]: DEBUG nova.compute.manager [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-changed-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 696.317018] env[70020]: DEBUG nova.compute.manager [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing instance network info cache due to event network-changed-4e709a63-45c3-48e8-8762-26e149c61266. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 696.317018] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Acquiring lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.317018] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Acquired lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.317018] env[70020]: DEBUG nova.network.neutron [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Refreshing network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 696.357497] env[70020]: DEBUG nova.network.neutron [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.386650] env[70020]: DEBUG nova.scheduler.client.report [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 696.415498] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.474357] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617934, 'name': Rename_Task, 'duration_secs': 0.404945} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.475055] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.475619] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e14902a8-5290-4a46-b548-c24dcac40d3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.490435] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 696.490435] env[70020]: value = "task-3617936" [ 696.490435] env[70020]: _type = "Task" [ 696.490435] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.502759] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617936, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.542142] env[70020]: DEBUG nova.network.neutron [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Updating instance_info_cache with network_info: [{"id": "dbdfc4ab-6655-403a-8fa4-9d2cd2e84728", "address": "fa:16:3e:cd:78:96", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbdfc4ab-66", "ovs_interfaceid": "dbdfc4ab-6655-403a-8fa4-9d2cd2e84728", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.645928] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.675107] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.675375] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.675530] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.675713] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.675860] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.676029] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.676243] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.676399] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.676563] env[70020]: DEBUG nova.virt.hardware [None 
req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.676722] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.676892] env[70020]: DEBUG nova.virt.hardware [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.677794] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c678a3-e9ce-4fd8-b1d8-5586d60f5ff4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.686731] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e73c48-9e2f-44e1-90ab-cb0008b38e49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.768025] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617933, 'name': ReconfigVM_Task, 'duration_secs': 0.807483} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.769328] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606/4b5750d4-98ec-4c70-b214-fad97060b606.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.769328] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 696.787072] env[70020]: DEBUG oslo_vmware.api [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617935, 'name': PowerOffVM_Task, 'duration_secs': 0.244612} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.788085] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 696.788393] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 696.788949] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dd00fde-679a-43fd-9a9d-cc2d1a161927 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.879571] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 696.881840] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 696.881840] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleting the datastore file [datastore1] 832a38c8-ed3a-460b-91bd-0138d2f2d03d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.881840] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-466eaccb-737a-4517-838f-85f484a6dac8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.895165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.282s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.902513] env[70020]: DEBUG oslo_vmware.api [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 696.902513] env[70020]: value = "task-3617938" [ 696.902513] env[70020]: _type = "Task" [ 696.902513] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.904883] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.778s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.907034] env[70020]: INFO nova.compute.claims [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.909822] env[70020]: DEBUG nova.network.neutron [-] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.926092] env[70020]: DEBUG oslo_vmware.api [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.928986] env[70020]: INFO nova.scheduler.client.report [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocations for instance d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456 [ 697.007235] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617936, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.045584] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "refresh_cache-1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.045943] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Instance network_info: |[{"id": "dbdfc4ab-6655-403a-8fa4-9d2cd2e84728", "address": "fa:16:3e:cd:78:96", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbdfc4ab-66", "ovs_interfaceid": "dbdfc4ab-6655-403a-8fa4-9d2cd2e84728", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 697.046372] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:78:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbdfc4ab-6655-403a-8fa4-9d2cd2e84728', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 697.055746] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.056412] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 697.057011] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5913cc9-c99c-4958-9331-285ee52b6161 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.087332] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 697.087332] env[70020]: value = "task-3617939" [ 697.087332] env[70020]: _type = "Task" [ 697.087332] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.098086] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617939, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.130674] env[70020]: DEBUG nova.compute.manager [req-ded463be-95b7-4419-961c-e3fa91b44f93 req-929a2d73-dcf7-4b23-9225-67942a76715b service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Received event network-vif-plugged-4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.130941] env[70020]: DEBUG oslo_concurrency.lockutils [req-ded463be-95b7-4419-961c-e3fa91b44f93 req-929a2d73-dcf7-4b23-9225-67942a76715b service nova] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.131171] env[70020]: DEBUG oslo_concurrency.lockutils [req-ded463be-95b7-4419-961c-e3fa91b44f93 req-929a2d73-dcf7-4b23-9225-67942a76715b service nova] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.131334] env[70020]: DEBUG oslo_concurrency.lockutils [req-ded463be-95b7-4419-961c-e3fa91b44f93 req-929a2d73-dcf7-4b23-9225-67942a76715b service nova] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.131492] env[70020]: DEBUG nova.compute.manager [req-ded463be-95b7-4419-961c-e3fa91b44f93 req-929a2d73-dcf7-4b23-9225-67942a76715b service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] No waiting events found dispatching network-vif-plugged-4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.131647] env[70020]: WARNING nova.compute.manager [req-ded463be-95b7-4419-961c-e3fa91b44f93 req-929a2d73-dcf7-4b23-9225-67942a76715b service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Received unexpected event network-vif-plugged-4b681dd6-fab3-4812-988e-26b219b6c5c3 for instance with vm_state building and task_state spawning. 
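The lock bookkeeping recorded in the entries above ('Acquiring lock "..." by "..."', 'acquired ... waited N s', '"released" ... held N s') is emitted by oslo.concurrency's lockutils wrappers rather than by Nova's own code. A minimal, illustrative Python sketch of that pattern follows; it reuses one lock name visible in this trace, while the function body and the second lock name are hypothetical placeholders, not taken from the log.

    # Sketch only: the oslo.concurrency helpers whose DEBUG output appears in
    # this trace. lockutils itself logs the acquire (with wait time) and the
    # release (with hold time); the decorated/guarded code never logs locks.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('8adadb2e-2a20-45b1-bed8-34e09df25f39-events')
    def _pop_event():
        # Runs only while the named lock is held.
        pass

    # The same helper is also usable as a context manager:
    with lockutils.lock('refresh_cache-<instance-uuid>'):  # placeholder name
        pass  # e.g. refresh an instance's network info cache under the lock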
[ 697.207326] env[70020]: DEBUG nova.network.neutron [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updated VIF entry in instance network info cache for port 4e709a63-45c3-48e8-8762-26e149c61266. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 697.207497] env[70020]: DEBUG nova.network.neutron [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [{"id": "4e709a63-45c3-48e8-8762-26e149c61266", "address": "fa:16:3e:ef:1c:11", "network": {"id": "b284478c-c5d9-48ae-8f9e-db4ed9b7d525", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-354423789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add37b0346e74e7f9724e69253e2cffc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e709a63-45", "ovs_interfaceid": "4e709a63-45c3-48e8-8762-26e149c61266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.272986] env[70020]: DEBUG nova.network.neutron [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Successfully updated port: 4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.279568] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df387115-f8ee-467f-a730-643b5c9eb352 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.309938] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523050f0-3f41-84ce-0717-72c6cf43ac46/disk-0.vmdk. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 697.310781] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ff8c55-2dec-4c10-9eca-11309c3078be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.313868] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3dbe76-e995-4143-aeef-5377b46e7dd4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.321122] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523050f0-3f41-84ce-0717-72c6cf43ac46/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 697.321297] env[70020]: ERROR oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523050f0-3f41-84ce-0717-72c6cf43ac46/disk-0.vmdk due to incomplete transfer. [ 697.335046] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-96388dc2-f447-490f-b272-900f5f2c2aa0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.337274] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 697.351986] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523050f0-3f41-84ce-0717-72c6cf43ac46/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 697.352292] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Uploaded image b4a35e1b-e812-4975-a414-1bbb1e714c78 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 697.354526] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 697.355181] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3b33b436-d1db-4808-b189-168d33b83ae7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.363247] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 697.363247] env[70020]: value = "task-3617940" [ 697.363247] env[70020]: _type = "Task" [ 697.363247] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.372928] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617940, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.424156] env[70020]: INFO nova.compute.manager [-] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Took 1.43 seconds to deallocate network for instance. [ 697.424721] env[70020]: DEBUG oslo_vmware.api [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3617938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296202} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.427026] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.427328] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.427542] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.428042] env[70020]: INFO nova.compute.manager [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 697.428404] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.432406] env[70020]: DEBUG nova.compute.manager [-] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 697.432546] env[70020]: DEBUG nova.network.neutron [-] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.439493] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acc22b6f-6bb0-4687-81f2-2b3f3e1fecf5 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.834s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.498411] env[70020]: DEBUG oslo_vmware.api [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3617936, 'name': PowerOnVM_Task, 'duration_secs': 0.863913} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.501128] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.501335] env[70020]: INFO nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Took 9.31 seconds to spawn the instance on the hypervisor. [ 697.501504] env[70020]: DEBUG nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.502418] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b5b03b-09e6-44a3-86d1-5ce22dfa80b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.599382] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617939, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.711727] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Releasing lock "refresh_cache-d601179a-df77-4f2e-b8df-9185b8a485e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.714638] env[70020]: DEBUG nova.compute.manager [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Received event network-vif-plugged-dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.714638] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Acquiring lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.714638] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.714638] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.714638] env[70020]: DEBUG nova.compute.manager 
[req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] No waiting events found dispatching network-vif-plugged-dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.714902] env[70020]: WARNING nova.compute.manager [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Received unexpected event network-vif-plugged-dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 for instance with vm_state building and task_state spawning. [ 697.714902] env[70020]: DEBUG nova.compute.manager [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Received event network-changed-dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.714902] env[70020]: DEBUG nova.compute.manager [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Refreshing instance network info cache due to event network-changed-dbdfc4ab-6655-403a-8fa4-9d2cd2e84728. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.714902] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Acquiring lock "refresh_cache-1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.714902] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Acquired lock "refresh_cache-1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.715048] env[70020]: DEBUG nova.network.neutron [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Refreshing network info cache for port dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.759365] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.759365] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.759365] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 
tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.759365] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.759645] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.761319] env[70020]: INFO nova.compute.manager [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Terminating instance [ 697.780094] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.780201] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.780355] env[70020]: DEBUG nova.network.neutron [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.876341] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617940, 'name': Destroy_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.885472] env[70020]: DEBUG nova.network.neutron [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Port b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 697.938030] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.025783] env[70020]: INFO nova.compute.manager [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Took 40.17 seconds to build instance. [ 698.098373] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617939, 'name': CreateVM_Task, 'duration_secs': 0.521155} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.098557] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 698.099238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.099401] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.099711] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 698.099962] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c2946bc-ad6e-4f60-bbfb-cd1a4ef36bea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.105045] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 698.105045] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5205fc0f-3b5d-c629-0861-a5fe350e7730" [ 698.105045] env[70020]: _type = "Task" [ 698.105045] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.115472] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5205fc0f-3b5d-c629-0861-a5fe350e7730, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.231528] env[70020]: DEBUG nova.network.neutron [-] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.265557] env[70020]: DEBUG nova.compute.manager [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 698.266701] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.267234] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c690a1-9e41-4d02-ac78-65746bc2ee6b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.281828] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 698.285340] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d57b1144-e98d-4d53-b1ad-e3dbeae4c4d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.298314] env[70020]: DEBUG oslo_vmware.api [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 698.298314] env[70020]: value = "task-3617941" [ 698.298314] env[70020]: _type = "Task" [ 698.298314] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.309645] env[70020]: DEBUG oslo_vmware.api [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617941, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.319045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c08166c5-2c31-4d40-a61c-c541924eb49c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.319045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.328256] env[70020]: DEBUG nova.network.neutron [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.375747] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617940, 'name': Destroy_Task, 'duration_secs': 0.917143} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.376507] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Destroyed the VM [ 698.376507] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 698.376507] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1258a209-2a36-49d1-a07e-c8b6d672089b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.384992] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 698.384992] env[70020]: value = "task-3617942" [ 698.384992] env[70020]: _type = "Task" [ 698.384992] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.404313] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617942, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.442854] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6139ce1e-905e-4e65-b007-000834fe34ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.459160] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f49b10-bf5c-495c-ab64-d55bcf0d247c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.493962] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81992ae-d5a1-4337-8750-2b8427305abb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.505322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23285f48-da57-467f-af6a-40f25def0ab0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.523786] env[70020]: DEBUG nova.compute.provider_tree [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.528283] env[70020]: DEBUG oslo_concurrency.lockutils [None req-153eb80c-74c4-49a7-8f37-4b1818908250 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.223s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.561938] env[70020]: DEBUG nova.network.neutron [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Updated VIF entry in instance network info cache for port dbdfc4ab-6655-403a-8fa4-9d2cd2e84728. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.562361] env[70020]: DEBUG nova.network.neutron [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Updating instance_info_cache with network_info: [{"id": "dbdfc4ab-6655-403a-8fa4-9d2cd2e84728", "address": "fa:16:3e:cd:78:96", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbdfc4ab-66", "ovs_interfaceid": "dbdfc4ab-6655-403a-8fa4-9d2cd2e84728", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.566046] env[70020]: DEBUG nova.network.neutron [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.587664] env[70020]: DEBUG nova.compute.manager [req-cb2e2e3b-b111-4708-971a-acb16616b143 req-51590dc1-5b22-4e4a-9d0a-bb6403de365d service nova] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Received event network-vif-deleted-0e881a0f-2c6d-43ba-ad4e-bc9ce172d429 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.617046] env[70020]: 
DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5205fc0f-3b5d-c629-0861-a5fe350e7730, 'name': SearchDatastore_Task, 'duration_secs': 0.041781} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.617364] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.617603] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 698.617855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.617971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.618168] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 698.618484] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac567f5b-8973-4103-8f12-aec6e113895e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.665255] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 698.665255] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 698.666043] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fccaac0-fbc3-4a03-b858-2a3785d98bf2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.673230] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 698.673230] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52243bc5-9a5b-f155-fc8c-f1194b05ffd3" [ 698.673230] env[70020]: _type = "Task" [ 698.673230] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.681923] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52243bc5-9a5b-f155-fc8c-f1194b05ffd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.736046] env[70020]: INFO nova.compute.manager [-] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Took 1.30 seconds to deallocate network for instance. [ 698.806028] env[70020]: DEBUG oslo_vmware.api [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617941, 'name': PowerOffVM_Task, 'duration_secs': 0.423808} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.806152] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 698.806301] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 698.806539] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26acd442-6f36-4b2e-bf4b-0cd2ef85ac5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.883336] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 698.883336] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Deleting contents of 
the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 698.883457] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Deleting the datastore file [datastore1] f53cb08c-0939-4cb1-8476-8b289d6a1b05 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 698.884171] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b198b4f7-5849-457c-9a02-5e481cb459ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.895580] env[70020]: DEBUG oslo_vmware.api [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for the task: (returnval){ [ 698.895580] env[70020]: value = "task-3617944" [ 698.895580] env[70020]: _type = "Task" [ 698.895580] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.907384] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617942, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.912657] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.912908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.913115] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.927695] env[70020]: DEBUG oslo_vmware.api [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617944, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.028535] env[70020]: DEBUG nova.scheduler.client.report [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.032191] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 699.065630] env[70020]: DEBUG oslo_concurrency.lockutils [req-3869794f-03dc-4415-aded-1f266bef21dd req-d5226e91-d957-427d-b263-7a32da97086e service nova] Releasing lock "refresh_cache-1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.069237] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.069533] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Instance network_info: |[{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 699.069978] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 
tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:86:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b681dd6-fab3-4812-988e-26b219b6c5c3', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 699.079120] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating folder: Project (4e3eae740ef84ef88aef113ed4d6e57b). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.079120] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7796be8b-1790-423e-9793-8311fefdc8fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.093998] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created folder: Project (4e3eae740ef84ef88aef113ed4d6e57b) in parent group-v721521. [ 699.094143] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating folder: Instances. Parent ref: group-v721616. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.094830] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ab4a1cf-4a68-4179-985d-430849aecbe1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.106757] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created folder: Instances in parent group-v721616. [ 699.107009] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.107260] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 699.107428] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ec908b9-64f4-4a36-affd-2319449861fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.127612] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 699.127612] env[70020]: value = "task-3617947" [ 699.127612] env[70020]: _type = "Task" [ 699.127612] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.136936] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617947, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.186458] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52243bc5-9a5b-f155-fc8c-f1194b05ffd3, 'name': SearchDatastore_Task, 'duration_secs': 0.011904} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.187362] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18f38ed8-16fa-4ae5-b88c-798de6b9827f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.195780] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 699.195780] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524b16a2-02dc-93e9-0f65-a4337f94ff3e" [ 699.195780] env[70020]: _type = "Task" [ 699.195780] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.206219] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524b16a2-02dc-93e9-0f65-a4337f94ff3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.243084] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.302927] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "55c20886-ae10-4326-a9de-f8577f320a99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.303215] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "55c20886-ae10-4326-a9de-f8577f320a99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.395056] env[70020]: DEBUG nova.compute.manager [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Received event network-changed-4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.395271] env[70020]: DEBUG nova.compute.manager [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Refreshing instance network info cache due to event network-changed-4b681dd6-fab3-4812-988e-26b219b6c5c3. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 699.395485] env[70020]: DEBUG oslo_concurrency.lockutils [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.395627] env[70020]: DEBUG oslo_concurrency.lockutils [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.395786] env[70020]: DEBUG nova.network.neutron [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Refreshing network info cache for port 4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.406743] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617942, 'name': RemoveSnapshot_Task} progress is 84%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.413761] env[70020]: DEBUG oslo_vmware.api [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Task: {'id': task-3617944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2321} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.414031] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 699.414219] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 699.414397] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.414570] env[70020]: INFO nova.compute.manager [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Took 1.15 seconds to destroy the instance on the hypervisor. [ 699.414796] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.414987] env[70020]: DEBUG nova.compute.manager [-] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 699.415119] env[70020]: DEBUG nova.network.neutron [-] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.536598] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.537112] env[70020]: DEBUG nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 699.541829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.229s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.542520] env[70020]: DEBUG nova.objects.instance [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lazy-loading 'resources' on Instance uuid 6a114dce-7ed3-46e1-9d50-c3dd6efd340c {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 699.561435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.638401] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617947, 'name': CreateVM_Task, 'duration_secs': 0.357774} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.638579] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.639698] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.639698] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.639832] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 699.640186] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83c50874-cccf-42ae-9500-4a56358d1be7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.645722] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 699.645722] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52771749-da67-811c-e0b2-0439c0d04719" [ 699.645722] env[70020]: _type = "Task" [ 699.645722] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.656502] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52771749-da67-811c-e0b2-0439c0d04719, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.707174] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524b16a2-02dc-93e9-0f65-a4337f94ff3e, 'name': SearchDatastore_Task, 'duration_secs': 0.012719} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.707476] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.707795] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0/1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 699.708111] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cccbcfe-222f-4137-9931-63d250699c20 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.715046] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 699.715046] env[70020]: value = "task-3617948" [ 699.715046] env[70020]: _type = "Task" [ 699.715046] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.723016] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.897616] env[70020]: DEBUG oslo_vmware.api [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617942, 'name': RemoveSnapshot_Task, 'duration_secs': 1.093844} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.899925] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 699.901042] env[70020]: INFO nova.compute.manager [None req-3785bbfc-9e58-4ec2-b328-16dc811a1116 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Took 18.49 seconds to snapshot the instance on the hypervisor. 
[ 699.996619] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.996801] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.996981] env[70020]: DEBUG nova.network.neutron [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 700.043190] env[70020]: DEBUG nova.compute.utils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 700.044897] env[70020]: DEBUG nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 700.160148] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52771749-da67-811c-e0b2-0439c0d04719, 'name': SearchDatastore_Task, 'duration_secs': 0.014953} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.160498] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.160738] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.161049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.161211] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.161412] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.162136] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e51f510d-3917-4f4f-b7b1-0b27f12ac348 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.163952] env[70020]: DEBUG nova.network.neutron [-] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.172699] env[70020]: DEBUG nova.network.neutron [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updated VIF entry in instance network info cache for port 4b681dd6-fab3-4812-988e-26b219b6c5c3. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 700.172788] env[70020]: DEBUG nova.network.neutron [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.178533] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.178744] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.182484] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ca174c-6895-480d-957c-25babbbc7f04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.191928] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 700.191928] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52978073-2aad-3781-706a-2587c4b67bff" [ 700.191928] env[70020]: _type = "Task" [ 700.191928] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.203862] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52978073-2aad-3781-706a-2587c4b67bff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.230935] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617948, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.546159] env[70020]: DEBUG nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 700.573376] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46889461-d873-41fe-8b13-25dcebf3e645 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.589638] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840bee89-b1bc-48e2-b23a-53624af4d09c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.639907] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f613ddd9-6814-4c94-a34c-781201b23038 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.649589] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7e16ad-4eda-4a20-be6c-d808ab348215 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.668183] env[70020]: DEBUG nova.compute.provider_tree [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.669637] env[70020]: INFO nova.compute.manager [-] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Took 1.25 seconds to deallocate network for instance. [ 700.676439] env[70020]: DEBUG oslo_concurrency.lockutils [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.676669] env[70020]: DEBUG nova.compute.manager [req-50b3748a-78c4-4e9d-8cdc-d24eabc61164 req-8fd44704-ac88-4554-825b-79165577414d service nova] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Received event network-vif-deleted-bfb6aa7f-bef1-4a61-8430-16719d55f556 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.703161] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52978073-2aad-3781-706a-2587c4b67bff, 'name': SearchDatastore_Task, 'duration_secs': 0.021117} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.703916] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-995071c1-a0c9-46a2-83ef-a31687580c8d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.711593] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 700.711593] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527a385b-80da-dc56-d4d7-c60a24caf3a4" [ 700.711593] env[70020]: _type = "Task" [ 700.711593] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.721390] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527a385b-80da-dc56-d4d7-c60a24caf3a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.734808] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682552} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.735085] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0/1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 700.735303] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 700.735554] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e97e8260-cc5e-4482-877d-42e52eed6dcf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.744230] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 700.744230] env[70020]: value = "task-3617949" [ 700.744230] env[70020]: _type = "Task" [ 700.744230] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.755441] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617949, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.763394] env[70020]: DEBUG nova.network.neutron [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance_info_cache with network_info: [{"id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "address": "fa:16:3e:a9:91:ab", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d9f41a-97", "ovs_interfaceid": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.171298] env[70020]: DEBUG nova.scheduler.client.report [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.179080] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.224593] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527a385b-80da-dc56-d4d7-c60a24caf3a4, 'name': SearchDatastore_Task, 'duration_secs': 0.045877} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.224855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.225128] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39/8adadb2e-2a20-45b1-bed8-34e09df25f39.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.225732] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2613d50-2693-4471-9bd2-df33d8343b4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.233395] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 701.233395] env[70020]: value = "task-3617950" [ 701.233395] env[70020]: _type = "Task" [ 701.233395] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.243259] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617950, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.255208] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074888} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.255362] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 701.256838] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15ba087-b9d5-4fd6-bc88-a09c4529a41e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.272018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.284102] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0/1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 701.284653] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fc4025f-a88f-4c16-bf6d-003887b36b14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.305035] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 701.305035] env[70020]: value = "task-3617951" [ 701.305035] env[70020]: _type = "Task" [ 701.305035] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.313739] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617951, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.421822] env[70020]: DEBUG nova.compute.manager [req-5490093b-e47c-47c1-aa05-49cf2a235aff req-3aba3e99-0a53-4331-b531-ce94ffcff5e7 service nova] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Received event network-vif-deleted-3b22879c-7b2b-4ab9-9749-21a544891657 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.557024] env[70020]: DEBUG nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 701.583675] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 701.583938] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.584107] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 701.584289] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.584489] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 701.584581] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 701.584773] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 701.584929] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 701.585102] env[70020]: DEBUG nova.virt.hardware [None 
req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 701.585335] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 701.585424] env[70020]: DEBUG nova.virt.hardware [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 701.586345] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb341b8-14aa-44d9-9877-a269b2c6f93a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.595872] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef810ec-bd98-4452-b928-c1b913803ab0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.614489] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 701.620528] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Creating folder: Project (b2b3d42ca5354c1fb389acf0d712e73e). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.620657] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb93ebbb-746a-45e6-b391-257528860e57 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.633578] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Created folder: Project (b2b3d42ca5354c1fb389acf0d712e73e) in parent group-v721521. [ 701.633758] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Creating folder: Instances. Parent ref: group-v721619. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.633992] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bba1f33-1d4c-456d-9558-c7e771e32db0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.643832] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Created folder: Instances in parent group-v721619. [ 701.644070] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.645837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 701.645837] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-250ae097-c221-46f2-91d6-8123ef135a9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.663405] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.663405] env[70020]: value = "task-3617954" [ 701.663405] env[70020]: _type = "Task" [ 701.663405] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.672615] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617954, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.679702] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.682145] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.428s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.683829] env[70020]: INFO nova.compute.claims [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.705598] env[70020]: INFO nova.scheduler.client.report [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Deleted allocations for instance 6a114dce-7ed3-46e1-9d50-c3dd6efd340c [ 701.748531] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617950, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.811371] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee53e70-f269-470a-92b7-ecdeb5e4988c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.821113] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617951, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.835849] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7b49b1-fca1-4649-ae61-e956a81071ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.840651] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "d0756709-f17b-441e-b537-df937cfbde84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.840922] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "d0756709-f17b-441e-b537-df937cfbde84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.841180] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "d0756709-f17b-441e-b537-df937cfbde84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.841373] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "d0756709-f17b-441e-b537-df937cfbde84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.841573] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "d0756709-f17b-441e-b537-df937cfbde84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.847793] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 701.852976] env[70020]: INFO nova.compute.manager [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Terminating instance [ 702.174539] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617954, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.213723] env[70020]: DEBUG oslo_concurrency.lockutils [None req-31006f5b-23ce-45a2-8bd2-c1f83f75c4db tempest-ServersTestManualDisk-1313496142 tempest-ServersTestManualDisk-1313496142-project-member] Lock "6a114dce-7ed3-46e1-9d50-c3dd6efd340c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.849s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.245278] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617950, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.84919} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.245563] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39/8adadb2e-2a20-45b1-bed8-34e09df25f39.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.245785] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.246049] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1b2fc74-9588-4be9-bb51-fd26257042c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.253907] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 702.253907] env[70020]: value = "task-3617955" [ 702.253907] env[70020]: _type = "Task" [ 702.253907] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.265234] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617955, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.317033] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617951, 'name': ReconfigVM_Task, 'duration_secs': 0.670039} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.317033] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0/1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 702.317442] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-248cf250-318e-42f3-89b8-9c7c19779540 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.325594] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 702.325594] env[70020]: value = "task-3617956" [ 702.325594] env[70020]: _type = "Task" [ 702.325594] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.332475] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617956, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.355494] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.356107] env[70020]: DEBUG nova.compute.manager [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 702.356302] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.356537] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-427fe863-275e-48bb-a5a8-f02fc2da2c05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.359063] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc688a1-4d1e-478d-97ec-b1846953dcc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.369745] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.371653] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91ca9bb9-84a6-4490-a948-eb40b0203b69 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.373694] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 702.373694] env[70020]: value = "task-3617957" [ 702.373694] env[70020]: _type = "Task" [ 702.373694] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.380140] env[70020]: DEBUG oslo_vmware.api [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 702.380140] env[70020]: value = "task-3617958" [ 702.380140] env[70020]: _type = "Task" [ 702.380140] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.383260] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617957, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.391374] env[70020]: DEBUG oslo_vmware.api [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.675430] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617954, 'name': CreateVM_Task, 'duration_secs': 0.542941} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.675770] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 702.676347] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.678600] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.678600] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 702.678600] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52ac7745-b769-4a74-97a1-fb3800dda470 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.682508] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 702.682508] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529053c5-1c4e-a49c-893b-31f12997939a" [ 702.682508] env[70020]: _type = "Task" [ 702.682508] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.690988] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529053c5-1c4e-a49c-893b-31f12997939a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.764258] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066392} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.766721] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 702.767698] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c733a32-9654-45df-a0ea-c74a3c192f8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.791491] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39/8adadb2e-2a20-45b1-bed8-34e09df25f39.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.796280] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8220def9-6ab5-4418-b722-32fb08656b07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.815399] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 702.815399] env[70020]: value = "task-3617959" [ 702.815399] env[70020]: _type = "Task" [ 702.815399] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.826794] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617959, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.834859] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617956, 'name': Rename_Task, 'duration_secs': 0.173805} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.838085] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.838187] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-638661b6-7d47-4d5f-b0c9-f3dd878487c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.843623] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 702.843623] env[70020]: value = "task-3617960" [ 702.843623] env[70020]: _type = "Task" [ 702.843623] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.854234] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617960, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.885034] env[70020]: DEBUG oslo_vmware.api [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3617957, 'name': PowerOnVM_Task, 'duration_secs': 0.392282} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.887770] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 702.887961] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-664be269-a262-4d67-b28c-dcc22da18f62 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance '4b5750d4-98ec-4c70-b214-fad97060b606' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 702.898820] env[70020]: DEBUG oslo_vmware.api [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617958, 'name': PowerOffVM_Task, 'duration_secs': 0.183175} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.899114] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.899283] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 702.899597] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9914be6-1bff-49aa-a11e-2d322ca8e1bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.975233] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 702.975397] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 702.976190] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Deleting the datastore file [datastore2] d0756709-f17b-441e-b537-df937cfbde84 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.976190] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbfb14d5-0345-4f71-98bd-6fbf2b7a1324 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.988020] env[70020]: DEBUG oslo_vmware.api [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for the task: (returnval){ [ 702.988020] env[70020]: value = "task-3617962" [ 702.988020] env[70020]: _type = "Task" [ 702.988020] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.000946] env[70020]: DEBUG oslo_vmware.api [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617962, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.201714] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529053c5-1c4e-a49c-893b-31f12997939a, 'name': SearchDatastore_Task, 'duration_secs': 0.011462} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.204757] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.205043] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.205301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.205455] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.209023] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 703.209023] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2cced39-bf76-407b-89bf-088965d098b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.221451] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 703.221683] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 703.222734] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ea5ce65-492e-44db-aa85-49b08df5bc71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.230088] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 703.230088] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c8402a-5b95-505b-ede7-041e409b59c4" [ 703.230088] env[70020]: _type = "Task" [ 703.230088] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.241334] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c8402a-5b95-505b-ede7-041e409b59c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.326407] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617959, 'name': ReconfigVM_Task, 'duration_secs': 0.330119} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.328179] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39/8adadb2e-2a20-45b1-bed8-34e09df25f39.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 703.328599] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e952d091-5854-4d24-a3a0-71d48e3b47da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.331120] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32e8d9ee-df1c-491a-92c2-aa0c4aff6f2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.338499] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9562b8-d0d0-4138-a54e-d01fa48aef33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.341520] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 703.341520] env[70020]: value = "task-3617963" [ 703.341520] env[70020]: _type = "Task" [ 703.341520] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.375598] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35c4a3f-43c6-4148-b50e-eef79329340d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.381210] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617963, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.386463] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.392576] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a303bf-db59-4ab0-bb12-e8073697c757 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.406947] env[70020]: DEBUG nova.compute.provider_tree [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 703.497260] env[70020]: DEBUG oslo_vmware.api [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Task: {'id': task-3617962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.480539} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.497524] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.497699] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.497866] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.498048] env[70020]: INFO nova.compute.manager [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] [instance: d0756709-f17b-441e-b537-df937cfbde84] Took 1.14 seconds to destroy the instance on the hypervisor. [ 703.498294] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 703.498483] env[70020]: DEBUG nova.compute.manager [-] [instance: d0756709-f17b-441e-b537-df937cfbde84] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 703.498579] env[70020]: DEBUG nova.network.neutron [-] [instance: d0756709-f17b-441e-b537-df937cfbde84] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 703.741686] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c8402a-5b95-505b-ede7-041e409b59c4, 'name': SearchDatastore_Task, 'duration_secs': 0.046353} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.745017] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da4c8cc9-abe4-43a3-84d7-80b6fa7893d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.747845] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 703.747845] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5299c5be-e237-32ae-cdde-80877b264c8b" [ 703.747845] env[70020]: _type = "Task" [ 703.747845] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.756485] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5299c5be-e237-32ae-cdde-80877b264c8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.855746] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617963, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.881367] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.925027] env[70020]: DEBUG nova.compute.manager [req-60bc402e-fe86-4c1a-befe-8a22fd6ecd54 req-c791cb45-1a2f-433f-971f-56f20a47fc55 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Received event network-vif-deleted-1e8309fb-0a16-407f-9be0-abcac22185ff {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.925246] env[70020]: INFO nova.compute.manager [req-60bc402e-fe86-4c1a-befe-8a22fd6ecd54 req-c791cb45-1a2f-433f-971f-56f20a47fc55 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Neutron deleted interface 1e8309fb-0a16-407f-9be0-abcac22185ff; detaching it from the instance and deleting it from the info cache [ 703.925415] env[70020]: DEBUG nova.network.neutron [req-60bc402e-fe86-4c1a-befe-8a22fd6ecd54 req-c791cb45-1a2f-433f-971f-56f20a47fc55 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.949636] env[70020]: ERROR nova.scheduler.client.report [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [req-3a51f26f-8b01-48f4-bd98-bf4a9ceeda1d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3a51f26f-8b01-48f4-bd98-bf4a9ceeda1d"}]} [ 703.973546] env[70020]: DEBUG nova.scheduler.client.report [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 703.990027] env[70020]: DEBUG nova.scheduler.client.report [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 703.990027] env[70020]: DEBUG nova.compute.provider_tree [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.004078] env[70020]: DEBUG nova.scheduler.client.report [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 704.032071] env[70020]: DEBUG nova.scheduler.client.report [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 704.261425] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5299c5be-e237-32ae-cdde-80877b264c8b, 'name': SearchDatastore_Task, 'duration_secs': 0.012358} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.261984] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.262251] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] a09db142-60d1-4a62-8e76-1e2e3676124f/a09db142-60d1-4a62-8e76-1e2e3676124f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 704.262525] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-355264fa-1b1c-4014-91a8-a20fbb342bb6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.271773] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 704.271773] env[70020]: value = "task-3617964" [ 704.271773] env[70020]: _type = "Task" [ 704.271773] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.285525] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.356109] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617963, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.385063] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617960, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.396623] env[70020]: DEBUG nova.network.neutron [-] [instance: d0756709-f17b-441e-b537-df937cfbde84] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.426929] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "3501a6fc-f090-4098-8f63-57a97bd61f1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.427217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.428390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "3501a6fc-f090-4098-8f63-57a97bd61f1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.428390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.428390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.429642] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e486a6b2-715f-44f5-9813-d70202e0dfde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.434254] env[70020]: INFO nova.compute.manager [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Terminating instance [ 704.448360] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc45755-b7c0-49a9-adff-82870854c8a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.486761] 
env[70020]: DEBUG nova.compute.manager [req-60bc402e-fe86-4c1a-befe-8a22fd6ecd54 req-c791cb45-1a2f-433f-971f-56f20a47fc55 service nova] [instance: d0756709-f17b-441e-b537-df937cfbde84] Detach interface failed, port_id=1e8309fb-0a16-407f-9be0-abcac22185ff, reason: Instance d0756709-f17b-441e-b537-df937cfbde84 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 704.637038] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6bbd26-5851-4719-92e1-6f50e5e80bde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.648506] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ae492a-d4c6-4b56-a69c-9ee3dced093f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.686924] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24032bbe-4fae-4aac-adf9-2cbb8ad08c85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.695778] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6326ed-5bfc-406b-86ca-efe1044a00fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.711446] env[70020]: DEBUG nova.compute.provider_tree [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.785482] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617964, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.853130] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617963, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.884166] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.899513] env[70020]: INFO nova.compute.manager [-] [instance: d0756709-f17b-441e-b537-df937cfbde84] Took 1.40 seconds to deallocate network for instance. 
[ 704.943845] env[70020]: DEBUG nova.compute.manager [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 704.943845] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.944979] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4243de6f-f2cc-4e13-ba4a-9d906c43ac61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.953215] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 704.953510] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ead5546-9a7a-4f80-9774-3c26531d5812 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.960526] env[70020]: DEBUG oslo_vmware.api [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 704.960526] env[70020]: value = "task-3617965" [ 704.960526] env[70020]: _type = "Task" [ 704.960526] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.970496] env[70020]: DEBUG oslo_vmware.api [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617965, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.255427] env[70020]: DEBUG nova.scheduler.client.report [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 705.255427] env[70020]: DEBUG nova.compute.provider_tree [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 55 to 56 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 705.255622] env[70020]: DEBUG nova.compute.provider_tree [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.284451] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708664} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.284773] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] a09db142-60d1-4a62-8e76-1e2e3676124f/a09db142-60d1-4a62-8e76-1e2e3676124f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.284943] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.285240] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4257037c-1224-4ba3-ba01-51bec9de9faa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.292179] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 705.292179] env[70020]: value = "task-3617966" [ 705.292179] env[70020]: _type = "Task" [ 705.292179] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.299833] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.356299] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617963, 'name': Rename_Task, 'duration_secs': 1.897209} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.356580] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 705.356863] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ab9f1b2-1583-4e49-9971-b6c9d641fb6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.364387] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 705.364387] env[70020]: value = "task-3617967" [ 705.364387] env[70020]: _type = "Task" [ 705.364387] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.377395] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.386013] env[70020]: DEBUG oslo_vmware.api [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617960, 'name': PowerOnVM_Task, 'duration_secs': 2.448115} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.386317] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.386422] env[70020]: INFO nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Took 11.53 seconds to spawn the instance on the hypervisor. [ 705.386752] env[70020]: DEBUG nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.387451] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c70cf3-f3e0-465f-80b9-99fde710ac76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.406230] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.469999] env[70020]: DEBUG oslo_vmware.api [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617965, 'name': PowerOffVM_Task, 'duration_secs': 0.424275} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.469999] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 705.469999] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 705.470253] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b77f492-56d9-4f37-9555-a686bc8646eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.532855] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 705.533202] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 705.533490] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Deleting the datastore file [datastore2] 3501a6fc-f090-4098-8f63-57a97bd61f1b {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 705.533868] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fcf4443-86b5-41f1-b39f-4433d0502073 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.541122] env[70020]: DEBUG oslo_vmware.api [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for the task: (returnval){ [ 705.541122] env[70020]: value = "task-3617969" [ 705.541122] env[70020]: _type = "Task" [ 705.541122] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.552313] env[70020]: DEBUG oslo_vmware.api [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.598436] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "4b5750d4-98ec-4c70-b214-fad97060b606" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.598853] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.599155] env[70020]: DEBUG nova.compute.manager [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Going to confirm migration 1 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 705.761556] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.079s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.762150] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 705.765215] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 33.163s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.765409] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.765918] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 705.765918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.294s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.766081] env[70020]: DEBUG nova.objects.instance [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lazy-loading 'resources' on Instance uuid 79d98176-b566-4349-ad10-c2ea6fdbc657 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.768033] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e0ed24-cad9-4beb-9840-f70779a37447 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.776585] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c69e25d-ee92-4f82-bef5-633e5fbee487 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.792094] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb405a6-3a0f-40c2-83c4-08fb24867553 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.805962] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212d3421-af98-48ad-b5b5-b8e2ecc1c1e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.809393] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617966, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068704} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.810023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 705.811284] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4ffca1-6da5-41e9-bdd1-ca88669c571b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.839270] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179108MB free_disk=75GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 705.839449] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.859432] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] a09db142-60d1-4a62-8e76-1e2e3676124f/a09db142-60d1-4a62-8e76-1e2e3676124f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 705.859877] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9aaa904-1c5e-4626-a7b8-188871b9816f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.881008] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 705.881008] env[70020]: value = "task-3617970" [ 705.881008] env[70020]: _type = "Task" [ 705.881008] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.883685] env[70020]: DEBUG oslo_vmware.api [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3617967, 'name': PowerOnVM_Task, 'duration_secs': 0.493127} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.886771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.886771] env[70020]: INFO nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Took 9.24 seconds to spawn the instance on the hypervisor. [ 705.886875] env[70020]: DEBUG nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.887545] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90ca7d1-1787-44d2-87ec-bcd0245aac87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.907928] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617970, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.908531] env[70020]: INFO nova.compute.manager [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Took 45.26 seconds to build instance. [ 706.051052] env[70020]: DEBUG oslo_vmware.api [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Task: {'id': task-3617969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226992} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.051343] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.051527] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 706.051700] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.051923] env[70020]: INFO nova.compute.manager [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 706.052188] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.052378] env[70020]: DEBUG nova.compute.manager [-] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 706.052471] env[70020]: DEBUG nova.network.neutron [-] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.228523] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.228784] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.229101] env[70020]: DEBUG nova.network.neutron [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.229415] env[70020]: DEBUG nova.objects.instance [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lazy-loading 'info_cache' on Instance uuid 4b5750d4-98ec-4c70-b214-fad97060b606 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 706.270076] env[70020]: DEBUG nova.compute.utils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.273566] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 706.274479] env[70020]: DEBUG nova.network.neutron [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 706.340430] env[70020]: DEBUG nova.policy [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53b77842769c4ce5829d2cbb44ab704a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4746040d0494f5faf15ce3baffcc5c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.396995] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617970, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.415969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba9356ec-3b95-460c-9f23-85b55713c7d9 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.210s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.424186] env[70020]: INFO nova.compute.manager [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Took 41.02 seconds to build instance. [ 706.778251] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 706.870575] env[70020]: DEBUG nova.network.neutron [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Successfully created port: ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.887561] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8055b1f-06c1-4ab0-b1fc-496d91e5dc77 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.899381] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c736a8-5662-43bd-b210-3df9f2322bd2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.902626] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617970, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.930964] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492884c7-347b-410b-905d-4e050f77b827 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.933985] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1e22d32e-78f4-4f7a-b0ce-3a3ab4d293d0 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.698s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.934342] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 706.943125] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fa22c6-d044-4965-9900-72054e51822f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.957730] env[70020]: DEBUG nova.compute.provider_tree [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.034863] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fce9fb-5906-48ef-ae33-8091575a3583 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.041508] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Suspending the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 707.041763] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c5bc21f5-98f6-4a83-9353-5ef264d10501 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.047734] env[70020]: DEBUG oslo_vmware.api [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 707.047734] env[70020]: value = "task-3617971" [ 707.047734] env[70020]: _type = "Task" [ 707.047734] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.061495] env[70020]: DEBUG oslo_vmware.api [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617971, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.067017] env[70020]: DEBUG nova.compute.manager [req-e5fdc3c3-c8e9-426a-ac78-7a1701cc0f29 req-088702c6-49c4-42d2-9358-8bf5c30bd2a4 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Received event network-vif-deleted-c5dfdf74-4ed7-460a-b458-cb45cbc910c2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.067077] env[70020]: INFO nova.compute.manager [req-e5fdc3c3-c8e9-426a-ac78-7a1701cc0f29 req-088702c6-49c4-42d2-9358-8bf5c30bd2a4 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Neutron deleted interface c5dfdf74-4ed7-460a-b458-cb45cbc910c2; detaching it from the instance and deleting it from the info cache [ 707.067243] env[70020]: DEBUG nova.network.neutron [req-e5fdc3c3-c8e9-426a-ac78-7a1701cc0f29 req-088702c6-49c4-42d2-9358-8bf5c30bd2a4 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.317552] env[70020]: DEBUG nova.network.neutron [-] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.398741] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617970, 'name': ReconfigVM_Task, 'duration_secs': 1.030144} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.398741] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Reconfigured VM instance instance-0000001e to attach disk [datastore1] a09db142-60d1-4a62-8e76-1e2e3676124f/a09db142-60d1-4a62-8e76-1e2e3676124f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.399365] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d7c4a23-435b-46a4-98c9-c6f80cd8bcb2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.408699] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 707.408699] env[70020]: value = "task-3617972" [ 707.408699] env[70020]: _type = "Task" [ 707.408699] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.418699] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617972, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.440931] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.469978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.504247] env[70020]: DEBUG nova.scheduler.client.report [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 56 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 707.504247] env[70020]: DEBUG nova.compute.provider_tree [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 56 to 57 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 707.504247] env[70020]: DEBUG nova.compute.provider_tree [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.557110] env[70020]: DEBUG oslo_vmware.api [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617971, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.573847] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b400e4e-6f3b-4821-8516-e2024f1828d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.584782] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4723b8-e626-4faf-a951-f5e5cb396a77 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.627486] env[70020]: DEBUG nova.compute.manager [req-e5fdc3c3-c8e9-426a-ac78-7a1701cc0f29 req-088702c6-49c4-42d2-9358-8bf5c30bd2a4 service nova] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Detach interface failed, port_id=c5dfdf74-4ed7-460a-b458-cb45cbc910c2, reason: Instance 3501a6fc-f090-4098-8f63-57a97bd61f1b could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 707.706733] env[70020]: DEBUG nova.network.neutron [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance_info_cache with network_info: [{"id": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "address": "fa:16:3e:a9:91:ab", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d9f41a-97", "ovs_interfaceid": "b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.789973] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 707.821356] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 707.821810] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.822014] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 707.822216] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.822362] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 707.822514] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 707.822721] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 707.822878] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 707.823064] 
env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 707.823232] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 707.823403] env[70020]: DEBUG nova.virt.hardware [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 707.824302] env[70020]: INFO nova.compute.manager [-] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Took 1.77 seconds to deallocate network for instance. [ 707.825877] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ea26f5-35f9-4a53-bcde-bda22909a173 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.839843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0a3520-8a85-4326-b583-7b91762c579e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.922460] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617972, 'name': Rename_Task, 'duration_secs': 0.200023} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.922963] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 707.925106] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3ee8f1a-d97f-4fcd-a40d-78a39d9e80a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.930681] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 707.930681] env[70020]: value = "task-3617973" [ 707.930681] env[70020]: _type = "Task" [ 707.930681] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.941044] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617973, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.973416] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.013222] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.245s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.013541] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.870s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.015491] env[70020]: INFO nova.compute.claims [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.049261] env[70020]: INFO nova.scheduler.client.report [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Deleted allocations for instance 79d98176-b566-4349-ad10-c2ea6fdbc657 [ 708.062867] env[70020]: DEBUG oslo_vmware.api [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617971, 'name': SuspendVM_Task, 'duration_secs': 0.847208} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.063751] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Suspended the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 708.063751] env[70020]: DEBUG nova.compute.manager [None req-94e338cb-de5c-4276-8562-b3667344bf1b tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.064366] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81b87c0-faf8-470b-b9dc-ac7481ab1ade {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.209915] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-4b5750d4-98ec-4c70-b214-fad97060b606" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.210335] env[70020]: DEBUG nova.objects.instance [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lazy-loading 'migration_context' on Instance uuid 4b5750d4-98ec-4c70-b214-fad97060b606 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 708.339693] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.375127] env[70020]: DEBUG nova.compute.manager [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Received event network-changed-4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 708.375127] env[70020]: DEBUG nova.compute.manager [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Refreshing instance network info cache due to event network-changed-4b681dd6-fab3-4812-988e-26b219b6c5c3. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 708.375127] env[70020]: DEBUG oslo_concurrency.lockutils [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.375231] env[70020]: DEBUG oslo_concurrency.lockutils [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.375323] env[70020]: DEBUG nova.network.neutron [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Refreshing network info cache for port 4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.443473] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617973, 'name': PowerOnVM_Task} progress is 78%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.561815] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9eb3a214-7a4b-4fda-831a-d21a46069753 tempest-ServersTestFqdnHostnames-357486929 tempest-ServersTestFqdnHostnames-357486929-project-member] Lock "79d98176-b566-4349-ad10-c2ea6fdbc657" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.308s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.714644] env[70020]: DEBUG nova.objects.base [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Object Instance<4b5750d4-98ec-4c70-b214-fad97060b606> lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 708.716156] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7734a4-6364-47f0-9c50-be01f7f4eae7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.735676] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fc221f1-de67-488d-bb3e-08b738f3f983 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.741525] env[70020]: DEBUG oslo_vmware.api [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 708.741525] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52061df3-8c4d-1056-5e49-2b5b33f69c2b" [ 708.741525] env[70020]: _type = "Task" [ 708.741525] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.749128] env[70020]: DEBUG oslo_vmware.api [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52061df3-8c4d-1056-5e49-2b5b33f69c2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.947272] env[70020]: DEBUG oslo_vmware.api [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617973, 'name': PowerOnVM_Task, 'duration_secs': 0.698471} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.947533] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 708.947725] env[70020]: INFO nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Took 7.39 seconds to spawn the instance on the hypervisor. [ 708.947988] env[70020]: DEBUG nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.949576] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c368945d-b74d-48e0-988b-56e13c79e7f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.047241] env[70020]: DEBUG nova.network.neutron [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Successfully updated port: ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 709.195081] env[70020]: DEBUG nova.network.neutron [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updated VIF entry in instance network info cache for port 4b681dd6-fab3-4812-988e-26b219b6c5c3. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.195081] env[70020]: DEBUG nova.network.neutron [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.252182] env[70020]: DEBUG oslo_vmware.api [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52061df3-8c4d-1056-5e49-2b5b33f69c2b, 'name': SearchDatastore_Task, 'duration_secs': 0.015226} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.255057] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.352421] env[70020]: DEBUG nova.compute.manager [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Received event network-vif-plugged-ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.352588] env[70020]: DEBUG oslo_concurrency.lockutils [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] Acquiring lock "19036f6f-2ee3-4ea5-82fa-b510bf903922-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.353331] env[70020]: DEBUG oslo_concurrency.lockutils [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.353468] env[70020]: DEBUG oslo_concurrency.lockutils [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.354338] env[70020]: DEBUG nova.compute.manager [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] No waiting events found dispatching network-vif-plugged-ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.354338] env[70020]: WARNING nova.compute.manager [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Received unexpected event network-vif-plugged-ae7ca477-594c-47ae-a235-e80874d36402 for instance with vm_state building and task_state spawning. [ 709.354338] env[70020]: DEBUG nova.compute.manager [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Received event network-changed-ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.354338] env[70020]: DEBUG nova.compute.manager [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Refreshing instance network info cache due to event network-changed-ae7ca477-594c-47ae-a235-e80874d36402. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 709.354338] env[70020]: DEBUG oslo_concurrency.lockutils [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] Acquiring lock "refresh_cache-19036f6f-2ee3-4ea5-82fa-b510bf903922" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.354647] env[70020]: DEBUG oslo_concurrency.lockutils [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] Acquired lock "refresh_cache-19036f6f-2ee3-4ea5-82fa-b510bf903922" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.354647] env[70020]: DEBUG nova.network.neutron [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Refreshing network info cache for port ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.472067] env[70020]: INFO nova.compute.manager [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Took 40.39 seconds to build instance. [ 709.553863] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "refresh_cache-19036f6f-2ee3-4ea5-82fa-b510bf903922" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.591547] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cd47f5-e412-4581-9a0d-87630c79449b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.600337] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcde9b72-01d5-4c6b-b1e5-dabc36bc3454 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.633467] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5e940a-ec5b-4f83-af83-f231d77d7921 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.642055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d182c7-9f02-4d0c-ad98-a8c3b7932bfb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.649072] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.649259] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock 
"c56279e2-0fc6-4546-854c-82e5fda0e7a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.662627] env[70020]: DEBUG nova.compute.provider_tree [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.687382] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "48efbd17-ff4e-426a-a135-f43cae8c97d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.687754] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.698513] env[70020]: DEBUG oslo_concurrency.lockutils [req-ca0624b9-5a9d-40e0-a714-b21ec5c25fdd req-1a2604f4-0f4a-4929-b536-00c56c6c4ca6 service nova] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.894899] env[70020]: DEBUG nova.network.neutron [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.974796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad09a78e-d34d-4840-9f9f-468d8eab93dd tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "a09db142-60d1-4a62-8e76-1e2e3676124f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.722s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.044190] env[70020]: DEBUG nova.network.neutron [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.166170] env[70020]: DEBUG nova.scheduler.client.report [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.479661] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.550160] env[70020]: DEBUG oslo_concurrency.lockutils [req-52bc3a47-2b5a-4359-8e2c-db811e9810c0 req-8c8570a9-c877-4b4a-81a3-d838d8db869f service nova] Releasing lock "refresh_cache-19036f6f-2ee3-4ea5-82fa-b510bf903922" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.550645] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquired lock "refresh_cache-19036f6f-2ee3-4ea5-82fa-b510bf903922" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.550843] env[70020]: DEBUG nova.network.neutron [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.670858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.671436] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 710.675297] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.917s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.675564] env[70020]: DEBUG nova.objects.instance [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lazy-loading 'resources' on Instance uuid 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 711.007414] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.074692] env[70020]: DEBUG nova.compute.manager [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.076217] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9761137d-2272-46de-8344-a9e7cddcb8a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.089033] env[70020]: DEBUG nova.network.neutron [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.181754] env[70020]: DEBUG nova.compute.utils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.184146] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 711.186028] env[70020]: DEBUG nova.network.neutron [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 711.235223] env[70020]: DEBUG nova.compute.manager [None req-04e10bb2-5539-45dc-9a02-ad4df5b977ae tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.236328] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0eef962-01bf-4cf5-86fe-fb80f2ed1717 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.269055] env[70020]: DEBUG nova.policy [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '372fa4ad2b5344e48845f3d23331647f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3b881f355894590b537166c8bb03f52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 711.301458] env[70020]: DEBUG nova.network.neutron [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Updating instance_info_cache with network_info: [{"id": "ae7ca477-594c-47ae-a235-e80874d36402", "address": "fa:16:3e:1c:1e:66", "network": {"id": "e06547d1-c44e-47f2-b0b3-f35fbbb9ebfa", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1997727409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4746040d0494f5faf15ce3baffcc5c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3db2ab9e-1244-4377-b05f-ab76003f2428", "external-id": "nsx-vlan-transportzone-199", "segmentation_id": 199, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae7ca477-59", "ovs_interfaceid": "ae7ca477-594c-47ae-a235-e80874d36402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.567941] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 
tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "a09db142-60d1-4a62-8e76-1e2e3676124f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.568172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "a09db142-60d1-4a62-8e76-1e2e3676124f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.568228] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "a09db142-60d1-4a62-8e76-1e2e3676124f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.568409] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "a09db142-60d1-4a62-8e76-1e2e3676124f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.568567] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "a09db142-60d1-4a62-8e76-1e2e3676124f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.574378] env[70020]: INFO nova.compute.manager [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Terminating instance [ 711.591927] env[70020]: INFO nova.compute.manager [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] instance snapshotting [ 711.592160] env[70020]: WARNING nova.compute.manager [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 711.599119] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24f7904-168b-48f2-b8a1-216d9645cde0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.637148] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3103b59-5490-4b9d-8e3f-020584936f95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.656528] env[70020]: DEBUG 
nova.network.neutron [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Successfully created port: 9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.689202] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 711.751263] env[70020]: INFO nova.compute.manager [None req-04e10bb2-5539-45dc-9a02-ad4df5b977ae tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] instance snapshotting [ 711.752381] env[70020]: DEBUG nova.objects.instance [None req-04e10bb2-5539-45dc-9a02-ad4df5b977ae tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lazy-loading 'flavor' on Instance uuid a09db142-60d1-4a62-8e76-1e2e3676124f {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 711.801154] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29857e7-b96f-4671-9599-20a556a8bf17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.806389] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Releasing lock "refresh_cache-19036f6f-2ee3-4ea5-82fa-b510bf903922" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.806672] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Instance network_info: |[{"id": "ae7ca477-594c-47ae-a235-e80874d36402", "address": "fa:16:3e:1c:1e:66", "network": {"id": "e06547d1-c44e-47f2-b0b3-f35fbbb9ebfa", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1997727409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c4746040d0494f5faf15ce3baffcc5c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3db2ab9e-1244-4377-b05f-ab76003f2428", "external-id": "nsx-vlan-transportzone-199", "segmentation_id": 199, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae7ca477-59", "ovs_interfaceid": "ae7ca477-594c-47ae-a235-e80874d36402", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 711.809832] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:1e:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3db2ab9e-1244-4377-b05f-ab76003f2428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae7ca477-594c-47ae-a235-e80874d36402', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.817855] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Creating folder: Project (c4746040d0494f5faf15ce3baffcc5c9). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 711.818255] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f94db81-c760-44bc-b8c8-2dbb68ded177 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.820997] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c0023d-5b99-46d2-9714-4b56bbda1f22 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.857173] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1f1d4a-29ef-475d-9b93-af24eb60ac7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.859833] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Created folder: Project (c4746040d0494f5faf15ce3baffcc5c9) in parent group-v721521. [ 711.860036] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Creating folder: Instances. Parent ref: group-v721622. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 711.860382] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce4f513e-0abd-48de-9250-b0457fac746b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.867308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799cfa75-5781-43f1-958a-04ecbf7f2b34 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.875351] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Created folder: Instances in parent group-v721622. 
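Note: the Folder.CreateFolder / CreateVM_Task / PowerOnVM_Task entries in this log all follow the same oslo.vmware pattern: invoke a vSphere API method through the session, receive either a result or a Task managed object, and poll the task until the "progress is N%" lines end in "completed successfully". The sketch below is a minimal illustration of that pattern, not the actual nova.virt.vmwareapi code path; it assumes `session` is an already-created oslo_vmware.api.VMwareAPISession, and `parent_folder_ref` / `vm_ref` are hypothetical managed-object references obtained elsewhere.

    # Minimal sketch of the invoke/wait pattern visible in the log above.
    # Assumes `session` is an oslo_vmware.api.VMwareAPISession that has
    # already logged in (the SessionManager.Login entries near the start
    # of this log); object refs and names here are illustrative only.

    def create_project_folder(session, parent_folder_ref, name):
        # Synchronous call: CreateFolder returns the new Folder ref
        # directly, which is why the log shows no task polling around
        # the Folder.CreateFolder invocations.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)

    def power_on(session, vm_ref):
        # Task-producing call: PowerOnVM_Task returns a Task ref;
        # wait_for_task polls it (the "Waiting for the task" and
        # "progress is N%" entries) and raises if the task fails.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)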
[ 711.875743] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 711.876308] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 711.876584] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2405683-4ce3-4efb-81b4-16ffeabc3476 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.906755] env[70020]: DEBUG nova.compute.provider_tree [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.912123] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.912123] env[70020]: value = "task-3617976" [ 711.912123] env[70020]: _type = "Task" [ 711.912123] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.924796] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617976, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.085060] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "refresh_cache-a09db142-60d1-4a62-8e76-1e2e3676124f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.085060] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquired lock "refresh_cache-a09db142-60d1-4a62-8e76-1e2e3676124f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.085060] env[70020]: DEBUG nova.network.neutron [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.155038] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 712.155038] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-556796dd-05e5-4709-a790-24a35fb94e1e {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.163101] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 712.163101] env[70020]: value = "task-3617977" [ 712.163101] env[70020]: _type = "Task" [ 712.163101] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.171365] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617977, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.258858] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fe7c02-965f-4836-84a7-b13854df238b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.278440] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805e62db-5ea1-4e5b-92c5-0885338ef327 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.409044] env[70020]: DEBUG nova.scheduler.client.report [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.429229] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617976, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.612592] env[70020]: DEBUG nova.network.neutron [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.672920] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617977, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.701293] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 712.710593] env[70020]: DEBUG nova.network.neutron [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.733860] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 712.734065] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.734252] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 712.734404] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.734543] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 712.734684] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 712.734885] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 712.735064] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 712.735244] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 712.735401] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 712.735566] env[70020]: DEBUG nova.virt.hardware [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 712.736414] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d42b34-b5e2-4e56-89dc-f2a5d1469c73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.744585] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd10304-9127-41fd-8bab-e1c14c92eb40 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.789695] env[70020]: DEBUG nova.compute.manager [None req-04e10bb2-5539-45dc-9a02-ad4df5b977ae tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Instance disappeared during snapshot {{(pid=70020) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 712.920867] env[70020]: DEBUG nova.compute.manager [None req-04e10bb2-5539-45dc-9a02-ad4df5b977ae tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Found 0 images (rotation: 2) {{(pid=70020) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 712.922390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.247s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.924634] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.023s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.927647] env[70020]: INFO 
nova.compute.claims [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.936601] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617976, 'name': CreateVM_Task, 'duration_secs': 0.790294} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.936786] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 712.937539] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.937679] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.938222] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 712.938371] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-731ca1a7-9bc8-4174-aae9-2d08e9d8e7ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.947372] env[70020]: INFO nova.scheduler.client.report [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Deleted allocations for instance 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468 [ 712.949641] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 712.949641] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c2a130-044b-2b74-2a70-fec95b95aa59" [ 712.949641] env[70020]: _type = "Task" [ 712.949641] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.967176] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2a130-044b-2b74-2a70-fec95b95aa59, 'name': SearchDatastore_Task, 'duration_secs': 0.016806} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.967827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.968136] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.968388] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.968536] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.968711] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.969021] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35cd8326-bcc9-4c12-b312-e85320a26943 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.980042] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.980710] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 712.981601] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df0ece00-2a57-472a-bbee-6aa08e358bb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.989732] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 712.989732] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522acba1-01ba-3ab8-8823-dc3fef43b806" [ 712.989732] env[70020]: _type = "Task" [ 712.989732] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.999946] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522acba1-01ba-3ab8-8823-dc3fef43b806, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.172621] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617977, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.216340] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Releasing lock "refresh_cache-a09db142-60d1-4a62-8e76-1e2e3676124f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.216340] env[70020]: DEBUG nova.compute.manager [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.216340] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.216340] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f9ecf4-0d25-4b46-92f3-9c60cd828f4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.223053] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.223626] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7b994d9-a2df-4c01-8154-d559c659e372 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.229874] env[70020]: DEBUG oslo_vmware.api [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 713.229874] env[70020]: value = "task-3617978" [ 713.229874] env[70020]: _type = "Task" [ 713.229874] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.240310] env[70020]: DEBUG oslo_vmware.api [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617978, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.373089] env[70020]: DEBUG nova.compute.manager [req-22e33b3f-2b27-4721-8390-a76519d514a5 req-f436a89a-0437-47b7-8a3c-c802d7047392 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Received event network-vif-plugged-9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 713.373089] env[70020]: DEBUG oslo_concurrency.lockutils [req-22e33b3f-2b27-4721-8390-a76519d514a5 req-f436a89a-0437-47b7-8a3c-c802d7047392 service nova] Acquiring lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.373089] env[70020]: DEBUG oslo_concurrency.lockutils [req-22e33b3f-2b27-4721-8390-a76519d514a5 req-f436a89a-0437-47b7-8a3c-c802d7047392 service nova] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.373089] env[70020]: DEBUG oslo_concurrency.lockutils [req-22e33b3f-2b27-4721-8390-a76519d514a5 req-f436a89a-0437-47b7-8a3c-c802d7047392 service nova] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.373089] env[70020]: DEBUG nova.compute.manager [req-22e33b3f-2b27-4721-8390-a76519d514a5 req-f436a89a-0437-47b7-8a3c-c802d7047392 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] No waiting events found dispatching network-vif-plugged-9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 713.373579] env[70020]: WARNING nova.compute.manager [req-22e33b3f-2b27-4721-8390-a76519d514a5 req-f436a89a-0437-47b7-8a3c-c802d7047392 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Received unexpected event network-vif-plugged-9c1b9c25-fd2c-4379-959d-d103fa9a5848 for instance with vm_state building and task_state spawning. 
[ 713.386276] env[70020]: DEBUG nova.network.neutron [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Successfully updated port: 9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.458764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2efc0ca8-1101-4645-8662-ea4fe4b05789 tempest-InstanceActionsV221TestJSON-1422150622 tempest-InstanceActionsV221TestJSON-1422150622-project-member] Lock "0f89d49e-d26c-4d5d-90d7-6f0bf3d67468" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.466s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.504126] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522acba1-01ba-3ab8-8823-dc3fef43b806, 'name': SearchDatastore_Task, 'duration_secs': 0.009186} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.505579] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25e4c05c-7261-4e03-a8b7-d094ae7bc64c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.512600] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 713.512600] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e69754-ce1d-cc01-d25b-548c33973fe2" [ 713.512600] env[70020]: _type = "Task" [ 713.512600] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.522993] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e69754-ce1d-cc01-d25b-548c33973fe2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.672608] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617977, 'name': CreateSnapshot_Task, 'duration_secs': 1.25714} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.672905] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 713.675031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e12ad6-7775-459b-8e01-cf693422c087 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.743064] env[70020]: DEBUG oslo_vmware.api [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617978, 'name': PowerOffVM_Task, 'duration_secs': 0.219873} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.743064] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.743303] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.743432] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5de64fda-693f-471b-9a48-3013696ec5aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.767453] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.767681] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.767860] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Deleting the datastore file [datastore1] a09db142-60d1-4a62-8e76-1e2e3676124f {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.768130] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-997f6d5d-43d5-4038-a0af-dd485d2530eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.775484] env[70020]: DEBUG oslo_vmware.api [None 
req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for the task: (returnval){ [ 713.775484] env[70020]: value = "task-3617980" [ 713.775484] env[70020]: _type = "Task" [ 713.775484] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.783789] env[70020]: DEBUG oslo_vmware.api [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617980, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.889199] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "refresh_cache-6c36df58-3ab3-4595-b89c-9ab5a4664eec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.889359] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquired lock "refresh_cache-6c36df58-3ab3-4595-b89c-9ab5a4664eec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.889517] env[70020]: DEBUG nova.network.neutron [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.025331] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e69754-ce1d-cc01-d25b-548c33973fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.009597} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.025331] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.026449] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 19036f6f-2ee3-4ea5-82fa-b510bf903922/19036f6f-2ee3-4ea5-82fa-b510bf903922.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 714.026626] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b20a5ff4-5530-4456-8a75-f1dc317f5d5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.033431] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 714.033431] env[70020]: value = "task-3617981" [ 714.033431] env[70020]: _type = "Task" [ 714.033431] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.043225] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617981, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.193718] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 714.197358] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fea77fcb-4c1d-413f-877f-dd47436aaf55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.210845] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 714.210845] env[70020]: value = "task-3617982" [ 714.210845] env[70020]: _type = "Task" [ 714.210845] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.223689] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617982, 'name': CloneVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.289150] env[70020]: DEBUG oslo_vmware.api [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Task: {'id': task-3617980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089416} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.289520] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.289609] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.289765] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.289932] env[70020]: INFO nova.compute.manager [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Took 1.08 seconds to destroy the instance on the hypervisor. [ 714.290193] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.290385] env[70020]: DEBUG nova.compute.manager [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.290503] env[70020]: DEBUG nova.network.neutron [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.312952] env[70020]: DEBUG nova.network.neutron [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.453824] env[70020]: DEBUG nova.network.neutron [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.546151] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469275} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.546739] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 19036f6f-2ee3-4ea5-82fa-b510bf903922/19036f6f-2ee3-4ea5-82fa-b510bf903922.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 714.546739] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 714.546912] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-baa0b4cf-5aa0-4982-bd41-cf4c7864ecac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.560537] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 714.560537] env[70020]: value = "task-3617983" [ 714.560537] env[70020]: _type = "Task" [ 714.560537] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.571357] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617983, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.639905] env[70020]: DEBUG nova.network.neutron [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Updating instance_info_cache with network_info: [{"id": "9c1b9c25-fd2c-4379-959d-d103fa9a5848", "address": "fa:16:3e:ef:69:54", "network": {"id": "f2e5d2a4-cde1-4c13-9fd8-e81667b4579e", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1298127480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3b881f355894590b537166c8bb03f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c1b9c25-fd", "ovs_interfaceid": "9c1b9c25-fd2c-4379-959d-d103fa9a5848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.676397] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38aa30d0-8634-462a-901c-f70093202e0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.684574] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af4567c-2cb7-4067-942b-391a0063fa0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.718505] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322ae4b9-9c88-4491-9bce-76733039c912 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.726560] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617982, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.734033] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef654b4c-760f-4f21-b332-d742adb71619 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.749083] env[70020]: DEBUG nova.compute.provider_tree [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.815998] env[70020]: DEBUG nova.network.neutron [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.074060] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069274} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.074530] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.075423] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbef79af-b90a-4e4c-b4ee-7f8c3aa2bdd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.099184] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 19036f6f-2ee3-4ea5-82fa-b510bf903922/19036f6f-2ee3-4ea5-82fa-b510bf903922.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.099998] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1f4af5e-e097-4145-8afe-73551226cabd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.124133] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 715.124133] env[70020]: value = "task-3617984" [ 715.124133] env[70020]: _type = "Task" [ 715.124133] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.133831] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617984, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.145020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Releasing lock "refresh_cache-6c36df58-3ab3-4595-b89c-9ab5a4664eec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.145020] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Instance network_info: |[{"id": "9c1b9c25-fd2c-4379-959d-d103fa9a5848", "address": "fa:16:3e:ef:69:54", "network": {"id": "f2e5d2a4-cde1-4c13-9fd8-e81667b4579e", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1298127480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3b881f355894590b537166c8bb03f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c1b9c25-fd", "ovs_interfaceid": "9c1b9c25-fd2c-4379-959d-d103fa9a5848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 715.145285] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:69:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c1b9c25-fd2c-4379-959d-d103fa9a5848', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.151421] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Creating folder: Project (a3b881f355894590b537166c8bb03f52). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 715.151937] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-497fc844-9c7a-4b48-9922-3aa61ff96c89 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.165152] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Created folder: Project (a3b881f355894590b537166c8bb03f52) in parent group-v721521. [ 715.165152] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Creating folder: Instances. Parent ref: group-v721627. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 715.165152] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46a4ba1e-8fcf-4402-bc4a-e38bafa14629 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.174249] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Created folder: Instances in parent group-v721627. [ 715.174249] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.174249] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 715.174249] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7b2cd9e-01b4-4d11-a708-af911b2fffa4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.195887] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.195887] env[70020]: value = "task-3617987" [ 715.195887] env[70020]: _type = "Task" [ 715.195887] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.204889] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617987, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.224331] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617982, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.254310] env[70020]: DEBUG nova.scheduler.client.report [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.318976] env[70020]: INFO nova.compute.manager [-] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Took 1.03 seconds to deallocate network for instance. [ 715.632123] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617984, 'name': ReconfigVM_Task, 'duration_secs': 0.450876} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.632660] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 19036f6f-2ee3-4ea5-82fa-b510bf903922/19036f6f-2ee3-4ea5-82fa-b510bf903922.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.633128] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd8834d9-22b8-437d-8c86-8582d65f1b3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.642219] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 715.642219] env[70020]: value = "task-3617988" [ 715.642219] env[70020]: _type = "Task" [ 715.642219] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.650027] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617988, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.705588] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617987, 'name': CreateVM_Task, 'duration_secs': 0.383889} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.705866] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 715.706521] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.706695] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.706993] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 715.707273] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9def20d-0f85-4fe3-9bcb-a85b222621b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.714926] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 715.714926] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ef6989-bd32-fbee-dc29-62ec6b619830" [ 715.714926] env[70020]: _type = "Task" [ 715.714926] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.738314] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ef6989-bd32-fbee-dc29-62ec6b619830, 'name': SearchDatastore_Task, 'duration_secs': 0.009717} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.741770] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.742055] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.742305] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.742452] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.742624] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.742902] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3617982, 'name': CloneVM_Task, 'duration_secs': 1.471288} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.743210] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-437daaa4-9dda-41c2-8667-d1f32e06f1a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.745105] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Created linked-clone VM from snapshot [ 715.745834] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626fd4d4-f6bb-4c5e-87ff-ec9592efacff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.753900] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Uploading image 27106013-2716-4959-a8cc-999a0ae152b3 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 715.756693] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.756908] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.757560] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b862050e-d83f-4f14-8f29-70cab4dc7b04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.760214] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.760552] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 715.763523] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.254s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.765850] env[70020]: INFO nova.compute.claims [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.771831] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 715.771831] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d7bd35-1331-a042-03ca-40c771552f35" [ 715.771831] env[70020]: _type = "Task" [ 715.771831] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.780337] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d7bd35-1331-a042-03ca-40c771552f35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.789032] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 715.789032] env[70020]: value = "vm-721626" [ 715.789032] env[70020]: _type = "VirtualMachine" [ 715.789032] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 715.789819] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-db3a59d3-8d8a-4018-b188-fa29205c82cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.795726] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease: (returnval){ [ 715.795726] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a8ee8-5f33-40d2-4d79-29997df08bc1" [ 715.795726] env[70020]: _type = "HttpNfcLease" [ 715.795726] env[70020]: } obtained for exporting VM: (result){ [ 715.795726] env[70020]: value = "vm-721626" [ 715.795726] env[70020]: _type = "VirtualMachine" [ 715.795726] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 715.795978] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the lease: (returnval){ [ 715.795978] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a8ee8-5f33-40d2-4d79-29997df08bc1" [ 715.795978] env[70020]: _type = "HttpNfcLease" [ 715.795978] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 715.802266] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 715.802266] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a8ee8-5f33-40d2-4d79-29997df08bc1" [ 715.802266] env[70020]: _type = "HttpNfcLease" [ 715.802266] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 715.825354] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.963786] env[70020]: DEBUG nova.compute.manager [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Received event network-changed-9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 715.963967] env[70020]: DEBUG nova.compute.manager [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Refreshing instance network info cache due to event network-changed-9c1b9c25-fd2c-4379-959d-d103fa9a5848. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 715.964247] env[70020]: DEBUG oslo_concurrency.lockutils [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] Acquiring lock "refresh_cache-6c36df58-3ab3-4595-b89c-9ab5a4664eec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.964494] env[70020]: DEBUG oslo_concurrency.lockutils [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] Acquired lock "refresh_cache-6c36df58-3ab3-4595-b89c-9ab5a4664eec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.964655] env[70020]: DEBUG nova.network.neutron [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Refreshing network info cache for port 9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.149566] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617988, 'name': Rename_Task, 'duration_secs': 0.172948} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.149834] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 716.150085] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-899ef9a0-0eb7-4387-a6d6-97b2416000f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.156450] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 716.156450] env[70020]: value = "task-3617990" [ 716.156450] env[70020]: _type = "Task" [ 716.156450] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.165070] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.165342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.176661] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.276369] env[70020]: DEBUG nova.compute.utils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 716.278862] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 716.278862] env[70020]: DEBUG nova.network.neutron [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.300396] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d7bd35-1331-a042-03ca-40c771552f35, 'name': SearchDatastore_Task, 'duration_secs': 0.009219} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.304950] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6116049a-852e-43ed-ba85-0c253fc12466 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.315795] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 716.315795] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a8ee8-5f33-40d2-4d79-29997df08bc1" [ 716.315795] env[70020]: _type = "HttpNfcLease" [ 716.315795] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 716.320689] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 716.320689] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a8ee8-5f33-40d2-4d79-29997df08bc1" [ 716.320689] env[70020]: _type = "HttpNfcLease" [ 716.320689] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 716.321097] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 716.321097] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d89b9d-9499-e341-bd0f-028c9bc2777a" [ 716.321097] env[70020]: _type = "Task" [ 716.321097] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.322034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ec37ad-7ea5-43c8-b688-0a6295813ead {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.335201] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523ea462-6d2d-9c91-cb15-7ee79a2ee284/disk-0.vmdk from lease info. 
{{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 716.335402] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523ea462-6d2d-9c91-cb15-7ee79a2ee284/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 716.344683] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d89b9d-9499-e341-bd0f-028c9bc2777a, 'name': SearchDatastore_Task, 'duration_secs': 0.009741} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.344923] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.346060] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 6c36df58-3ab3-4595-b89c-9ab5a4664eec/6c36df58-3ab3-4595-b89c-9ab5a4664eec.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.399583] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0526f299-8829-41c4-8ae5-68da3ea6b5a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.406557] env[70020]: DEBUG nova.policy [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da6aef21bbb04398b9a1964c0e5379fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4988050072ea4c81a69c636049df9e4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 716.415206] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 716.415206] env[70020]: value = "task-3617991" [ 716.415206] env[70020]: _type = "Task" [ 716.415206] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.423756] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617991, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.446943] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ed67112f-3af1-4041-96e2-84ebc649753b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.670167] env[70020]: DEBUG oslo_vmware.api [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3617990, 'name': PowerOnVM_Task, 'duration_secs': 0.456} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.670167] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.670945] env[70020]: INFO nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Took 8.88 seconds to spawn the instance on the hypervisor. [ 716.670945] env[70020]: DEBUG nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.671456] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8412a43d-3bff-4806-9547-b6a0746eb127 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.779280] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 716.915215] env[70020]: DEBUG nova.network.neutron [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Updated VIF entry in instance network info cache for port 9c1b9c25-fd2c-4379-959d-d103fa9a5848. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.916132] env[70020]: DEBUG nova.network.neutron [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Updating instance_info_cache with network_info: [{"id": "9c1b9c25-fd2c-4379-959d-d103fa9a5848", "address": "fa:16:3e:ef:69:54", "network": {"id": "f2e5d2a4-cde1-4c13-9fd8-e81667b4579e", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1298127480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3b881f355894590b537166c8bb03f52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c1b9c25-fd", "ovs_interfaceid": "9c1b9c25-fd2c-4379-959d-d103fa9a5848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.931649] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617991, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.965080] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc068ad-dc15-4033-b3e6-ce190f03f915 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.973872] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036f1282-b1d7-4e1b-a340-41508f3e45f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.005991] env[70020]: DEBUG nova.network.neutron [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Successfully created port: 2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.008539] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce03bb1-93bf-4685-ae59-1e7e2447c587 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.019823] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024f32cc-3311-4318-bfb7-435165f3e88c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.034312] env[70020]: DEBUG nova.compute.provider_tree [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.197512] env[70020]: INFO nova.compute.manager [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Took 44.98 seconds to build instance. 
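The repeated "Waiting for the task ... to complete", "progress is N%", and "completed successfully ... duration_secs" records above all come from the same poll-until-done pattern around vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task, Rename_Task). The snippet below is only a minimal, self-contained sketch of that polling idea; FakeTask and get-task-info handling are hypothetical stand-ins and do not reproduce the oslo.vmware API.

# Hypothetical illustration of the poll loop behind the
# "Waiting for the task ... to complete" / "progress is N%" records above.
# FakeTask is a stand-in, not the real vCenter or oslo.vmware API.
import time

class FakeTask:
    """Pretends to be a vCenter task that finishes after a few polls."""
    def __init__(self, name, polls_to_finish=3):
        self.name = name
        self.polls_to_finish = polls_to_finish
        self._polls = 0

    def info(self):
        self._polls += 1
        progress = min(100, int(100 * self._polls / self.polls_to_finish))
        state = 'success' if progress >= 100 else 'running'
        return {'name': self.name, 'progress': progress, 'state': state}

def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, reporting progress."""
    start = time.monotonic()
    while True:
        info = task.info()
        if info['state'] == 'success':
            duration = time.monotonic() - start
            print("Task %s completed successfully in %.3fs" % (info['name'], duration))
            return info
        if info['state'] == 'error':
            raise RuntimeError("Task %s failed" % info['name'])
        print("Task %s progress is %d%%" % (info['name'], info['progress']))
        time.sleep(poll_interval)

wait_for_task(FakeTask('CopyVirtualDisk_Task'))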
[ 717.289164] env[70020]: INFO nova.virt.block_device [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Booting with volume 1e88bef3-79b2-4977-918f-5c3ee9732e62 at /dev/sda [ 717.352456] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c640bb7f-f2ad-40d8-a00b-e9830634bc79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.364068] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ce06e9-b428-47cf-b8db-a98d50c84372 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.406168] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d37df47-3289-415a-a892-b3e5d4bd1944 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.415441] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bd7f64-08fd-4a4c-a640-76c2ac69150b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.427048] env[70020]: DEBUG oslo_concurrency.lockutils [req-2e565677-8fc1-42b3-afbd-ec6947372bb2 req-bf46373e-f7a6-4b1c-8a2a-fcbd42127450 service nova] Releasing lock "refresh_cache-6c36df58-3ab3-4595-b89c-9ab5a4664eec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.436817] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617991, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.457663] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d022c94-b498-4240-8a3d-242b3d0b36e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.465779] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15473a3a-abad-4f57-bc3e-a5f06795ae48 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.481050] env[70020]: DEBUG nova.virt.block_device [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updating existing volume attachment record: 8fd48cdd-11f2-4716-b129-9df94d2f2a40 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 717.538044] env[70020]: DEBUG nova.scheduler.client.report [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.706026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-829c167f-50cc-472b-94d7-848225ca6cb5 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.207s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.940370] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617991, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.494591} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.940370] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 6c36df58-3ab3-4595-b89c-9ab5a4664eec/6c36df58-3ab3-4595-b89c-9ab5a4664eec.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.940370] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.940370] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb64a9c6-197d-4e2f-a675-b8fff43dc940 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.946229] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 717.946229] env[70020]: value = "task-3617992" [ 717.946229] env[70020]: _type = "Task" [ 717.946229] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.956787] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617992, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.048118] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.285s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.048649] env[70020]: DEBUG nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.051776] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.337s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.053237] env[70020]: INFO nova.compute.claims [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.210529] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 718.460038] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617992, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077138} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.460038] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 718.460038] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b8f539-32f1-4930-86e1-1356474e87d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.484649] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 6c36df58-3ab3-4595-b89c-9ab5a4664eec/6c36df58-3ab3-4595-b89c-9ab5a4664eec.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.486034] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c1bea0d-d8e4-4460-bd90-f052b4a2028a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.508436] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 718.508436] env[70020]: value = "task-3617993" [ 718.508436] env[70020]: _type = "Task" [ 718.508436] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.518385] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617993, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.557949] env[70020]: DEBUG nova.compute.utils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 718.565321] env[70020]: DEBUG nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 718.618079] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "interface-19036f6f-2ee3-4ea5-82fa-b510bf903922-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.618459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "interface-19036f6f-2ee3-4ea5-82fa-b510bf903922-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.618887] env[70020]: DEBUG nova.objects.instance [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lazy-loading 'flavor' on Instance uuid 19036f6f-2ee3-4ea5-82fa-b510bf903922 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 718.733037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.885541] env[70020]: DEBUG nova.network.neutron [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Successfully updated port: 2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.018346] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617993, 'name': ReconfigVM_Task, 'duration_secs': 0.295782} completed 
successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.018718] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 6c36df58-3ab3-4595-b89c-9ab5a4664eec/6c36df58-3ab3-4595-b89c-9ab5a4664eec.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.019332] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3153f9c4-a451-4efb-af63-1472981ff2d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.025852] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 719.025852] env[70020]: value = "task-3617994" [ 719.025852] env[70020]: _type = "Task" [ 719.025852] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.034887] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617994, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.062116] env[70020]: DEBUG nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.112846] env[70020]: DEBUG nova.compute.manager [req-acc8888b-a8bd-47f3-91e9-5413ea523957 req-b7699987-b425-4359-b225-f022707c342d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Received event network-vif-plugged-2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.113121] env[70020]: DEBUG oslo_concurrency.lockutils [req-acc8888b-a8bd-47f3-91e9-5413ea523957 req-b7699987-b425-4359-b225-f022707c342d service nova] Acquiring lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.113359] env[70020]: DEBUG oslo_concurrency.lockutils [req-acc8888b-a8bd-47f3-91e9-5413ea523957 req-b7699987-b425-4359-b225-f022707c342d service nova] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.113582] env[70020]: DEBUG oslo_concurrency.lockutils [req-acc8888b-a8bd-47f3-91e9-5413ea523957 req-b7699987-b425-4359-b225-f022707c342d service nova] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.113709] env[70020]: DEBUG nova.compute.manager [req-acc8888b-a8bd-47f3-91e9-5413ea523957 req-b7699987-b425-4359-b225-f022707c342d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] No waiting events found dispatching network-vif-plugged-2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 719.113945] env[70020]: WARNING nova.compute.manager [req-acc8888b-a8bd-47f3-91e9-5413ea523957 req-b7699987-b425-4359-b225-f022707c342d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Received unexpected event network-vif-plugged-2944f964-96c9-42c8-8914-3737e1a4349a for instance with vm_state building and task_state block_device_mapping. 
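The "Received event network-vif-plugged-..." / "No waiting events found dispatching ..." / "Received unexpected event ..." sequence above reflects external-event dispatch: Neutron notifies the compute service, which hands the event to a registered waiter if one exists and otherwise logs it as unexpected (here the instance was still in task_state block_device_mapping, so nothing was waiting yet). The sketch below is a simplified model of that dispatch idea using plain threading primitives; the class and method names are illustrative only and do not mirror Nova's InstanceEvents API.

# Simplified, hypothetical model of "deliver the event to a waiter if one is
# registered, otherwise report it as unexpected" -- not Nova's actual code.
import threading

class InstanceEventDispatcher:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest in an event before starting the operation."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Deliver an external event; returns True if a waiter consumed it."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print("WARNING: received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
            return False
        ev.set()
        return True

dispatcher = InstanceEventDispatcher()
# No waiter registered yet -> reported as unexpected, like the WARNING above.
dispatcher.dispatch('b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e',
                    'network-vif-plugged-2944f964-96c9-42c8-8914-3737e1a4349a')
# With a waiter registered, the same event unblocks the waiting build step.
waiter = dispatcher.prepare_for_event('b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e',
                                      'network-vif-plugged-2944f964-96c9-42c8-8914-3737e1a4349a')
dispatcher.dispatch('b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e',
                    'network-vif-plugged-2944f964-96c9-42c8-8914-3737e1a4349a')
print("waiter notified:", waiter.is_set())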
[ 719.123241] env[70020]: DEBUG nova.objects.instance [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lazy-loading 'pci_requests' on Instance uuid 19036f6f-2ee3-4ea5-82fa-b510bf903922 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.388210] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquiring lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.388366] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquired lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.388520] env[70020]: DEBUG nova.network.neutron [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.537036] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617994, 'name': Rename_Task, 'duration_secs': 0.155592} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.540082] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.540606] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17f225ee-8071-4dba-95f9-eb3c6514520f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.547505] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 719.547505] env[70020]: value = "task-3617995" [ 719.547505] env[70020]: _type = "Task" [ 719.547505] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.558921] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617995, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.584203] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 719.584793] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.585919] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.585919] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.585919] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.585919] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.587621] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.587621] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.587621] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 
tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.587621] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.587621] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.587877] env[70020]: DEBUG nova.virt.hardware [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.592168] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f5ab24-1398-4553-83e3-b0b1ddaf3b14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.601586] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a586b48-d455-4387-a6b8-8d61b6d57018 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.630217] env[70020]: DEBUG nova.objects.base [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Object Instance<19036f6f-2ee3-4ea5-82fa-b510bf903922> lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 719.632695] env[70020]: DEBUG nova.network.neutron [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.741989] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9e0027-8aa6-4445-ba0d-4fb976994e23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.751198] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7fb7d00c-dcd4-44e6-b3b4-80cbe4c97507 tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "interface-19036f6f-2ee3-4ea5-82fa-b510bf903922-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.133s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.754038] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c77f23a-2428-4308-bcd4-00d8cc973204 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.789165] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc09fea-5a03-4b11-b7a7-a110468967a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.797815] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c04392-5b8b-4d00-bd2c-276fb68ffa73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.812550] env[70020]: DEBUG nova.compute.provider_tree [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.928855] env[70020]: DEBUG nova.network.neutron [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.060183] env[70020]: DEBUG oslo_vmware.api [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3617995, 'name': PowerOnVM_Task, 'duration_secs': 0.496299} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.060538] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.060538] env[70020]: INFO nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Took 7.36 seconds to spawn the instance on the hypervisor. [ 720.060709] env[70020]: DEBUG nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.061609] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418189ad-dce4-4666-b508-c387966166ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.076702] env[70020]: DEBUG nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 720.093332] env[70020]: DEBUG nova.network.neutron [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updating instance_info_cache with network_info: [{"id": "2944f964-96c9-42c8-8914-3737e1a4349a", "address": "fa:16:3e:c3:66:11", "network": {"id": "7e64c7c3-a66a-4854-8f1a-153495baabb6", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-192249615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4988050072ea4c81a69c636049df9e4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2944f964-96", "ovs_interfaceid": "2944f964-96c9-42c8-8914-3737e1a4349a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.109797] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.110061] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.110338] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.110432] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 
tempest-ServersListShow2100Test-971081473-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.110579] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.110726] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.110975] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.111181] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 720.111424] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.111566] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.111866] env[70020]: DEBUG nova.virt.hardware [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.113059] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9652621-b2a6-431a-9524-5dee5ead8e44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.122227] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee25076-5d07-4ad1-b5ba-dfc3923624a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.138854] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 720.145031] env[70020]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Creating folder: Project (9ab619bd27bb4b7db48d604ad2a9da1d). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 720.145602] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e07076d2-bcec-481a-b331-12cc7f33a581 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.156060] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Created folder: Project (9ab619bd27bb4b7db48d604ad2a9da1d) in parent group-v721521. [ 720.156060] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Creating folder: Instances. Parent ref: group-v721630. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 720.156060] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee5fafa7-e9c1-40e1-a628-0419066936b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.164908] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Created folder: Instances in parent group-v721630. [ 720.165174] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 720.165532] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 720.165796] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e531634-139e-459f-8c01-f465a512fa93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.184041] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 720.184041] env[70020]: value = "task-3617998" [ 720.184041] env[70020]: _type = "Task" [ 720.184041] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.192392] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617998, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.315823] env[70020]: DEBUG nova.scheduler.client.report [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.581290] env[70020]: INFO nova.compute.manager [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Took 43.46 seconds to build instance. [ 720.595526] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Releasing lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.595976] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Instance network_info: |[{"id": "2944f964-96c9-42c8-8914-3737e1a4349a", "address": "fa:16:3e:c3:66:11", "network": {"id": "7e64c7c3-a66a-4854-8f1a-153495baabb6", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-192249615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4988050072ea4c81a69c636049df9e4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2944f964-96", "ovs_interfaceid": "2944f964-96c9-42c8-8914-3737e1a4349a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 720.596697] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:66:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '2944f964-96c9-42c8-8914-3737e1a4349a', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 720.608040] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Creating folder: Project (4988050072ea4c81a69c636049df9e4b). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 720.609241] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06bb6d6d-fcfc-458d-9133-57476c20433b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.623064] env[70020]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 720.623428] env[70020]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=70020) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 720.623967] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Folder already exists: Project (4988050072ea4c81a69c636049df9e4b). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 720.624295] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Creating folder: Instances. Parent ref: group-v721575. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 720.625098] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbe926b2-2b8a-4c56-bdfc-8ba287354868 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.635374] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Created folder: Instances in parent group-v721575. [ 720.635535] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 720.635771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 720.636045] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb0649c4-31e4-4122-aa0a-e7717c376d43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.656160] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 720.656160] env[70020]: value = "task-3618001" [ 720.656160] env[70020]: _type = "Task" [ 720.656160] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.664380] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618001, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.693275] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3617998, 'name': CreateVM_Task, 'duration_secs': 0.462042} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.693474] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 720.693936] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.694129] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.694508] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 720.694760] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2834168-51b7-43dd-8380-51d457228764 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.700239] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 720.700239] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522aa83e-de83-66bd-0843-696d334f41f4" [ 720.700239] env[70020]: _type = "Task" [ 720.700239] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.709045] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522aa83e-de83-66bd-0843-696d334f41f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.824024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.769s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.824024] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 720.824932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.267s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.825346] env[70020]: DEBUG nova.objects.instance [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lazy-loading 'resources' on Instance uuid 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.083489] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c34b3dc4-5d9c-472f-81d8-c78dc96265ba tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.813s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.176953] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618001, 'name': CreateVM_Task, 'duration_secs': 0.466921} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.176953] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 721.177647] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '8fd48cdd-11f2-4716-b129-9df94d2f2a40', 'guest_format': None, 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721586', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'name': 'volume-1e88bef3-79b2-4977-918f-5c3ee9732e62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'serial': '1e88bef3-79b2-4977-918f-5c3ee9732e62'}, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=70020) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 721.179885] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Root volume attach. Driver type: vmdk {{(pid=70020) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 721.180757] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc955d8-4fdf-4ee1-aa6a-b8d8d67816c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.191410] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf1818d-d6b1-472d-b470-7cc53aaf68d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.206179] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e9db72-992f-40ae-94c6-d210c5b3a387 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.217689] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-29ee3c1a-b70d-4739-a8ef-d1c1cd08d987 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.218993] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522aa83e-de83-66bd-0843-696d334f41f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013699} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.219299] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.219522] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 721.219750] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.219887] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.220065] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 721.220655] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44ec67c3-a11f-4177-92d8-4e0758904d61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.225350] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 721.225350] env[70020]: value = "task-3618002" [ 721.225350] env[70020]: _type = "Task" [ 721.225350] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.229340] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 721.229504] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 721.233810] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23c9b69c-d983-4191-8ca6-8426c1e1779e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.236022] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618002, 'name': RelocateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.239592] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 721.239592] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5272dc62-d6e1-5acb-bb00-2ea483350495" [ 721.239592] env[70020]: _type = "Task" [ 721.239592] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.248598] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5272dc62-d6e1-5acb-bb00-2ea483350495, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.329050] env[70020]: DEBUG nova.compute.utils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 721.333780] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 721.333960] env[70020]: DEBUG nova.network.neutron [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.382387] env[70020]: DEBUG nova.policy [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 721.552299] env[70020]: DEBUG nova.compute.manager [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Received event network-changed-2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.554756] env[70020]: DEBUG nova.compute.manager [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Refreshing instance network info cache due to event network-changed-2944f964-96c9-42c8-8914-3737e1a4349a. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 721.554756] env[70020]: DEBUG oslo_concurrency.lockutils [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] Acquiring lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.554756] env[70020]: DEBUG oslo_concurrency.lockutils [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] Acquired lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.554756] env[70020]: DEBUG nova.network.neutron [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Refreshing network info cache for port 2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.586111] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 721.722924] env[70020]: DEBUG nova.network.neutron [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Successfully created port: 347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.738378] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618002, 'name': RelocateVM_Task} progress is 20%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.749406] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5272dc62-d6e1-5acb-bb00-2ea483350495, 'name': SearchDatastore_Task, 'duration_secs': 0.011659} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.752672] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0928b3df-5e0b-4539-acde-33e23d1e7174 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.758695] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 721.758695] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525e6fc5-1260-9cc9-ac0f-cd6bdf84c184" [ 721.758695] env[70020]: _type = "Task" [ 721.758695] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.766662] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525e6fc5-1260-9cc9-ac0f-cd6bdf84c184, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.787103] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.787103] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.787103] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.787322] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.788022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.789874] env[70020]: INFO nova.compute.manager [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Terminating instance [ 721.834619] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 721.993984] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d98ba5e-702a-4429-9a43-537357046094 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.001792] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e2434b-0a13-4a69-9e67-f85877e09571 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.036133] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dbf1df-b48a-4b8f-8d39-342fcbd897ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.043593] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846d150a-6f3d-466a-b966-eabb1a460ed4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.062174] env[70020]: DEBUG nova.compute.provider_tree [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.108874] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.134305] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "19036f6f-2ee3-4ea5-82fa-b510bf903922" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.134564] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.134805] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "19036f6f-2ee3-4ea5-82fa-b510bf903922-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.135212] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 
tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.135330] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.138794] env[70020]: INFO nova.compute.manager [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Terminating instance [ 722.235458] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618002, 'name': RelocateVM_Task, 'duration_secs': 0.59704} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.235458] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Volume attach. Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 722.235458] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721586', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'name': 'volume-1e88bef3-79b2-4977-918f-5c3ee9732e62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'serial': '1e88bef3-79b2-4977-918f-5c3ee9732e62'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 722.236330] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e37b23-3120-4d6e-819f-0955924d5a1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.256637] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d34c5f7-9c7a-4588-941c-26e6e30cdc67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.281817] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] 
Reconfiguring VM instance instance-00000021 to attach disk [datastore1] volume-1e88bef3-79b2-4977-918f-5c3ee9732e62/volume-1e88bef3-79b2-4977-918f-5c3ee9732e62.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.285615] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56896548-8fde-4eeb-8713-849eb0c702f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.300682] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525e6fc5-1260-9cc9-ac0f-cd6bdf84c184, 'name': SearchDatastore_Task, 'duration_secs': 0.0117} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.301645] env[70020]: DEBUG nova.network.neutron [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updated VIF entry in instance network info cache for port 2944f964-96c9-42c8-8914-3737e1a4349a. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 722.301972] env[70020]: DEBUG nova.network.neutron [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updating instance_info_cache with network_info: [{"id": "2944f964-96c9-42c8-8914-3737e1a4349a", "address": "fa:16:3e:c3:66:11", "network": {"id": "7e64c7c3-a66a-4854-8f1a-153495baabb6", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-192249615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4988050072ea4c81a69c636049df9e4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2944f964-96", "ovs_interfaceid": "2944f964-96c9-42c8-8914-3737e1a4349a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.304026] env[70020]: DEBUG nova.compute.manager [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 722.304026] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.304026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.304208] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 722.305182] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a2b991-b223-48ab-961d-186dcdb087f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.308193] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39eed5ab-a092-40f4-9c27-f6c86b859ae6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.313292] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 722.313292] env[70020]: value = "task-3618003" [ 722.313292] env[70020]: _type = "Task" [ 722.313292] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.315768] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 722.320606] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5e64312-5952-4516-b7f3-6621f33f4e01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.322175] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 722.322175] env[70020]: value = "task-3618004" [ 722.322175] env[70020]: _type = "Task" [ 722.322175] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.331027] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618003, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.331626] env[70020]: DEBUG oslo_vmware.api [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 722.331626] env[70020]: value = "task-3618005" [ 722.331626] env[70020]: _type = "Task" [ 722.331626] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.338419] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.347276] env[70020]: DEBUG oslo_vmware.api [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3618005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.566814] env[70020]: DEBUG nova.scheduler.client.report [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.643195] env[70020]: DEBUG nova.compute.manager [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 722.643489] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.644482] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc466f3b-8831-4208-af89-345b49b80145 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.654992] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 722.654992] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db3b582d-76ad-41e2-b7f4-4b76b8390e42 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.662251] env[70020]: DEBUG oslo_vmware.api [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 722.662251] env[70020]: value = "task-3618006" [ 722.662251] env[70020]: _type = "Task" [ 722.662251] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.673299] env[70020]: DEBUG oslo_vmware.api [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3618006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.806404] env[70020]: DEBUG oslo_concurrency.lockutils [req-11d469dc-2d98-4d6a-b1e2-a8531dfbe8a5 req-775ca413-7208-410b-8ddd-eb48cdbfb37c service nova] Releasing lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.831098] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618003, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.841265] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618004, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.846470] env[70020]: DEBUG oslo_vmware.api [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3618005, 'name': PowerOffVM_Task, 'duration_secs': 0.228667} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.846836] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 722.847073] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 722.847781] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6a5db76-387e-467f-9976-8a07a9a12b0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.850582] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 722.878774] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 722.879019] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.879193] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 722.879379] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.879522] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 722.879663] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 722.879862] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 722.880032] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 722.880225] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 722.880355] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 722.880520] env[70020]: DEBUG nova.virt.hardware [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 722.881445] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e292c5-5bef-4261-b413-98ab59d7fcf6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.889615] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea6adc2-48ab-497d-9dd8-9cd6cff8a0d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.939346] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 722.939584] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 722.939764] env[70020]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Deleting the datastore file [datastore1] 6c36df58-3ab3-4595-b89c-9ab5a4664eec {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.940049] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f6ac2b-da4a-4396-8d9b-83cefd9687f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.948825] env[70020]: DEBUG oslo_vmware.api [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for the task: (returnval){ [ 722.948825] env[70020]: value = "task-3618008" [ 722.948825] env[70020]: _type = "Task" [ 722.948825] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.957069] env[70020]: DEBUG oslo_vmware.api [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3618008, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.073332] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.248s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.075797] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.243s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.076153] env[70020]: DEBUG nova.objects.instance [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lazy-loading 'resources' on Instance uuid 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 723.147125] env[70020]: INFO nova.scheduler.client.report [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Deleted allocations for instance 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764 [ 723.174722] env[70020]: DEBUG oslo_vmware.api [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3618006, 'name': PowerOffVM_Task, 'duration_secs': 0.428539} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.174948] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 723.178028] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 723.178028] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b445321-ec55-49ac-b7c2-97881f0e87c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.303916] env[70020]: DEBUG nova.network.neutron [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Successfully updated port: 347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 723.336486] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618003, 'name': ReconfigVM_Task, 'duration_secs': 0.577912} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.341892] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Reconfigured VM instance instance-00000021 to attach disk [datastore1] volume-1e88bef3-79b2-4977-918f-5c3ee9732e62/volume-1e88bef3-79b2-4977-918f-5c3ee9732e62.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.349475] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5406} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.350902] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b515d9c-4f25-4eef-b02c-926faf5b8d1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.362345] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 723.362613] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 723.362899] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 723.363092] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 723.363291] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Deleting the datastore file [datastore1] 19036f6f-2ee3-4ea5-82fa-b510bf903922 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.364121] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8dc5e96d-7301-467a-a9eb-7340abdbe2c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.366389] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7aea2af-7aed-4472-b453-3c1b9972c600 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.378097] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 723.378097] env[70020]: value = "task-3618011" [ 723.378097] env[70020]: _type = "Task" [ 723.378097] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.378097] env[70020]: DEBUG oslo_vmware.api [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for the task: (returnval){ [ 723.378097] env[70020]: value = "task-3618012" [ 723.378097] env[70020]: _type = "Task" [ 723.378097] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.378097] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 723.378097] env[70020]: value = "task-3618010" [ 723.378097] env[70020]: _type = "Task" [ 723.378097] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.399033] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.399033] env[70020]: DEBUG oslo_vmware.api [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3618012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.403430] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618010, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.462024] env[70020]: DEBUG oslo_vmware.api [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Task: {'id': task-3618008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224833} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.462024] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.462024] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 723.464021] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.464021] env[70020]: INFO nova.compute.manager [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Took 1.16 seconds to destroy the instance on the hypervisor. [ 723.464021] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 723.464021] env[70020]: DEBUG nova.compute.manager [-] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 723.464021] env[70020]: DEBUG nova.network.neutron [-] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.660459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6877f9d3-19d9-40c3-9940-10e23bd854cc tempest-DeleteServersAdminTestJSON-1322678731 tempest-DeleteServersAdminTestJSON-1322678731-project-member] Lock "7cf7f0a9-8240-4e78-b5d4-b1eb1da60764" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.340s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.810046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-13f6daa5-d859-40ed-b1b0-edd7717b8df3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.810046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-13f6daa5-d859-40ed-b1b0-edd7717b8df3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.810046] env[70020]: DEBUG nova.network.neutron [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 723.910226] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067186} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.910226] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618011, 'name': ReconfigVM_Task, 'duration_secs': 0.148653} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.910226] env[70020]: DEBUG oslo_vmware.api [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Task: {'id': task-3618012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.329027} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.912476] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 723.913408] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721586', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'name': 'volume-1e88bef3-79b2-4977-918f-5c3ee9732e62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'serial': '1e88bef3-79b2-4977-918f-5c3ee9732e62'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 723.913408] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.913683] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 723.914542] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.914542] env[70020]: INFO nova.compute.manager [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Took 1.27 seconds to destroy the instance on the hypervisor. [ 723.914542] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 723.915295] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd36ffdc-6866-44bb-a42f-5ca3426d62a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.917909] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11d721ae-dd45-4b4b-a675-8845b91c7a51 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.919461] env[70020]: DEBUG nova.compute.manager [-] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 723.919592] env[70020]: DEBUG nova.network.neutron [-] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.929779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "36f15b0a-d57f-49d8-9510-1036e889a438" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.929779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "36f15b0a-d57f-49d8-9510-1036e889a438" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.949275] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 723.954544] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34cbb45a-335e-48f0-9480-58fec6e4293d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.970092] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 723.970092] env[70020]: value = "task-3618013" [ 723.970092] env[70020]: _type = "Task" [ 723.970092] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.976428] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 723.976428] env[70020]: value = "task-3618014" [ 723.976428] env[70020]: _type = "Task" [ 723.976428] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.984168] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618013, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.993903] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.996286] env[70020]: DEBUG nova.compute.manager [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Received event network-vif-plugged-347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 723.996356] env[70020]: DEBUG oslo_concurrency.lockutils [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] Acquiring lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.996556] env[70020]: DEBUG oslo_concurrency.lockutils [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.996720] env[70020]: DEBUG oslo_concurrency.lockutils [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.996891] env[70020]: DEBUG nova.compute.manager [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] No waiting events found dispatching network-vif-plugged-347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 723.997168] env[70020]: WARNING nova.compute.manager [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Received unexpected event network-vif-plugged-347b7e73-55ed-4f2b-96f7-96ab25367148 for instance with vm_state 
building and task_state spawning. [ 723.997220] env[70020]: DEBUG nova.compute.manager [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Received event network-changed-347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 723.997429] env[70020]: DEBUG nova.compute.manager [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Refreshing instance network info cache due to event network-changed-347b7e73-55ed-4f2b-96f7-96ab25367148. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 723.997508] env[70020]: DEBUG oslo_concurrency.lockutils [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] Acquiring lock "refresh_cache-13f6daa5-d859-40ed-b1b0-edd7717b8df3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.228899] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b82d926-9bda-4987-aabf-498da29a0683 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.236435] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e5d04c-463f-442f-8ce5-5106b4c2328f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.271908] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673c9da6-d97b-4b40-b57a-910841efb9a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.281200] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0146e133-a8ce-4c6f-92d6-cd9db9c5ebbd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.289714] env[70020]: DEBUG nova.network.neutron [-] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.302044] env[70020]: DEBUG nova.compute.provider_tree [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 724.345746] env[70020]: DEBUG nova.network.neutron [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.486326] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618013, 'name': Rename_Task, 'duration_secs': 0.309835} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.486326] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 724.486624] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d27d284b-4e07-41d8-a960-d34971de4183 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.491917] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.497301] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 724.497301] env[70020]: value = "task-3618015" [ 724.497301] env[70020]: _type = "Task" [ 724.497301] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.504799] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618015, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.522077] env[70020]: DEBUG nova.network.neutron [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Updating instance_info_cache with network_info: [{"id": "347b7e73-55ed-4f2b-96f7-96ab25367148", "address": "fa:16:3e:a6:c6:fe", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap347b7e73-55", "ovs_interfaceid": "347b7e73-55ed-4f2b-96f7-96ab25367148", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.776605] env[70020]: DEBUG nova.network.neutron [-] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.792379] env[70020]: INFO nova.compute.manager [-] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Took 1.33 seconds to deallocate network for instance. [ 724.839123] env[70020]: ERROR nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [req-1036be47-01f6-488f-9287-cd5f8804eda7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1036be47-01f6-488f-9287-cd5f8804eda7"}]} [ 724.864342] env[70020]: DEBUG nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 724.883221] env[70020]: DEBUG nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 724.883435] env[70020]: DEBUG nova.compute.provider_tree [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 724.894027] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523ea462-6d2d-9c91-cb15-7ee79a2ee284/disk-0.vmdk. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 724.896376] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98ad5d4-fffd-49e8-882c-7a1cc0008837 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.898678] env[70020]: DEBUG nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 724.904594] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523ea462-6d2d-9c91-cb15-7ee79a2ee284/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 724.904797] env[70020]: ERROR oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523ea462-6d2d-9c91-cb15-7ee79a2ee284/disk-0.vmdk due to incomplete transfer. [ 724.905015] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c8577ff2-5ec0-4bab-9a26-dd45f15b30c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.913577] env[70020]: DEBUG oslo_vmware.rw_handles [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523ea462-6d2d-9c91-cb15-7ee79a2ee284/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 724.913779] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Uploaded image 27106013-2716-4959-a8cc-999a0ae152b3 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 724.916403] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 724.916670] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-26a82ae9-10ef-4aae-8596-fed708346c5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.920562] env[70020]: DEBUG nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 724.924419] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 724.924419] env[70020]: value = "task-3618016" [ 724.924419] env[70020]: _type = "Task" [ 724.924419] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.934515] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618016, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.990273] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618014, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.007165] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618015, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.029020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-13f6daa5-d859-40ed-b1b0-edd7717b8df3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.029020] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Instance network_info: |[{"id": "347b7e73-55ed-4f2b-96f7-96ab25367148", "address": "fa:16:3e:a6:c6:fe", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap347b7e73-55", "ovs_interfaceid": "347b7e73-55ed-4f2b-96f7-96ab25367148", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 725.029321] env[70020]: DEBUG oslo_concurrency.lockutils [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] Acquired lock "refresh_cache-13f6daa5-d859-40ed-b1b0-edd7717b8df3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.029321] env[70020]: DEBUG nova.network.neutron [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Refreshing network info cache for port 347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.029321] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:c6:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '347b7e73-55ed-4f2b-96f7-96ab25367148', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.041242] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating folder: Project 
(bd3733a000724aab9255cb498cecdfba). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.042967] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bda04002-e084-4352-bf10-9302c76b16d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.061389] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created folder: Project (bd3733a000724aab9255cb498cecdfba) in parent group-v721521. [ 725.061594] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating folder: Instances. Parent ref: group-v721635. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.064624] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8c0c235-ff4c-4d16-864d-478dc890576a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.075558] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created folder: Instances in parent group-v721635. [ 725.075847] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 725.076021] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.076208] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34fa22e3-5e65-4585-82aa-f2cf03ee72c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.101811] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.101811] env[70020]: value = "task-3618019" [ 725.101811] env[70020]: _type = "Task" [ 725.101811] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.110752] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618019, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.278312] env[70020]: INFO nova.compute.manager [-] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Took 1.36 seconds to deallocate network for instance. 
[ 725.298946] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.438810] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618016, 'name': Destroy_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.489896] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618014, 'name': ReconfigVM_Task, 'duration_secs': 1.043589} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.493163] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.494274] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8da6a74-a900-44a6-a8d8-5e1846db085a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.502111] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 725.502111] env[70020]: value = "task-3618020" [ 725.502111] env[70020]: _type = "Task" [ 725.502111] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.509284] env[70020]: DEBUG oslo_vmware.api [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618015, 'name': PowerOnVM_Task, 'duration_secs': 0.828433} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.512657] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 725.513032] env[70020]: INFO nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Took 5.93 seconds to spawn the instance on the hypervisor. 
[ 725.513254] env[70020]: DEBUG nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 725.514652] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423c0d5b-b3f2-4b9a-b781-1bb5ba328388 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.521319] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618020, 'name': Rename_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.599263] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fb414f-3bdc-4752-836a-2c8188e82425 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.611022] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316c37dd-d54b-4264-9352-60a18ec5dcc8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.617899] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618019, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.653771] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacbcb11-8382-4c88-bf96-623b804a94bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.662020] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24ca186-ea09-4975-9c22-b614c58e9095 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.676457] env[70020]: DEBUG nova.compute.provider_tree [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 725.789006] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.807168] env[70020]: DEBUG nova.network.neutron [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 
req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Updated VIF entry in instance network info cache for port 347b7e73-55ed-4f2b-96f7-96ab25367148. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.807536] env[70020]: DEBUG nova.network.neutron [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Updating instance_info_cache with network_info: [{"id": "347b7e73-55ed-4f2b-96f7-96ab25367148", "address": "fa:16:3e:a6:c6:fe", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap347b7e73-55", "ovs_interfaceid": "347b7e73-55ed-4f2b-96f7-96ab25367148", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.935265] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618016, 'name': Destroy_Task, 'duration_secs': 0.755338} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.935547] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Destroyed the VM [ 725.935925] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 725.936044] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a1f2fe6e-5b01-4685-8b9e-0a23ec0bc667 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.942495] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 725.942495] env[70020]: value = "task-3618021" [ 725.942495] env[70020]: _type = "Task" [ 725.942495] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.956082] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618021, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.012454] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618020, 'name': Rename_Task, 'duration_secs': 0.150204} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.012741] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.012991] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d80051a-2e88-4f67-8c33-6a1a4a6c5226 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.018639] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 726.018639] env[70020]: value = "task-3618022" [ 726.018639] env[70020]: _type = "Task" [ 726.018639] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.026591] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.040525] env[70020]: INFO nova.compute.manager [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Took 43.17 seconds to build instance. [ 726.118202] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618019, 'name': CreateVM_Task, 'duration_secs': 0.62676} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.118202] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 726.118202] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.118416] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.118774] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 726.119304] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66ccd280-6ff3-4977-a6ed-fc8e9738394b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.125986] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 726.125986] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52aca892-0d01-e156-aa58-1edf76913496" [ 726.125986] env[70020]: _type = "Task" [ 726.125986] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.138772] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52aca892-0d01-e156-aa58-1edf76913496, 'name': SearchDatastore_Task, 'duration_secs': 0.010068} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.139560] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.139808] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.140053] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.140390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.140647] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.142441] env[70020]: DEBUG nova.compute.manager [req-bf16d643-e5d4-4688-ad6a-f551c3c7daa5 req-4005cad6-00c8-4260-ae27-cbce09c9f0da service nova] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Received event network-vif-deleted-9c1b9c25-fd2c-4379-959d-d103fa9a5848 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.142671] env[70020]: DEBUG nova.compute.manager [req-bf16d643-e5d4-4688-ad6a-f551c3c7daa5 req-4005cad6-00c8-4260-ae27-cbce09c9f0da service nova] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Received event network-vif-deleted-ae7ca477-594c-47ae-a235-e80874d36402 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.143325] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df040c16-b081-4dc9-a8fa-1bd427a33549 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.152281] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.153300] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.153300] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e20ae10-15fa-4d9c-80b9-55faffec0819 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.159767] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 726.159767] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5286bba0-ca26-0aa2-c720-f1b69d47e585" [ 726.159767] env[70020]: _type = "Task" [ 726.159767] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.168324] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5286bba0-ca26-0aa2-c720-f1b69d47e585, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.213537] env[70020]: DEBUG nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 726.213810] env[70020]: DEBUG nova.compute.provider_tree [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 61 to 62 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 726.213989] env[70020]: DEBUG nova.compute.provider_tree [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 726.310742] env[70020]: DEBUG oslo_concurrency.lockutils [req-7a6d39db-4167-4f2e-bd4f-c956871019d8 req-a9476a0b-65d8-4206-bb09-3b496795b09a 
service nova] Releasing lock "refresh_cache-13f6daa5-d859-40ed-b1b0-edd7717b8df3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.454108] env[70020]: DEBUG oslo_vmware.api [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618021, 'name': RemoveSnapshot_Task, 'duration_secs': 0.450314} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.454679] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 726.456027] env[70020]: INFO nova.compute.manager [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Took 14.86 seconds to snapshot the instance on the hypervisor. [ 726.532026] env[70020]: DEBUG oslo_vmware.api [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618022, 'name': PowerOnVM_Task, 'duration_secs': 0.44473} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.532026] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 726.532026] env[70020]: INFO nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Took 6.45 seconds to spawn the instance on the hypervisor. 
[ 726.532026] env[70020]: DEBUG nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 726.532026] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f557243a-8a0f-470e-b198-9db53dede88d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.548025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4bc43ce2-a952-4899-b5f7-f8a1a59ea442 tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.725s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.677511] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5286bba0-ca26-0aa2-c720-f1b69d47e585, 'name': SearchDatastore_Task, 'duration_secs': 0.010733} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.677511] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f69870c-8221-4ca0-a96d-26268c03fab0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.685191] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 726.685191] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c2aafb-25f8-e8b7-d0b8-616c97527fe6" [ 726.685191] env[70020]: _type = "Task" [ 726.685191] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.693230] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2aafb-25f8-e8b7-d0b8-616c97527fe6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.720491] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.645s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.724526] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.749s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.727150] env[70020]: INFO nova.compute.claims [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.749616] env[70020]: INFO nova.scheduler.client.report [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Deleted allocations for instance 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb [ 726.790704] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.791019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.792828] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.793059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.793235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 
tempest-ImagesTestJSON-851757871-project-member] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.795539] env[70020]: INFO nova.compute.manager [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Terminating instance [ 726.959692] env[70020]: DEBUG nova.compute.manager [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Instance disappeared during snapshot {{(pid=70020) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 726.972356] env[70020]: DEBUG nova.compute.manager [None req-6f12a48d-b524-4ae7-b115-e5df3c47ffae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image not found during clean up 27106013-2716-4959-a8cc-999a0ae152b3 {{(pid=70020) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 727.049701] env[70020]: INFO nova.compute.manager [None req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Took 40.56 seconds to build instance. [ 727.050632] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 727.198286] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2aafb-25f8-e8b7-d0b8-616c97527fe6, 'name': SearchDatastore_Task, 'duration_secs': 0.009939} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.198616] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.198889] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 13f6daa5-d859-40ed-b1b0-edd7717b8df3/13f6daa5-d859-40ed-b1b0-edd7717b8df3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.199184] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fb6430e-2cef-43f7-9dc5-4e3c39aa32d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.206971] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 727.206971] env[70020]: value = "task-3618023" [ 727.206971] env[70020]: _type = "Task" [ 727.206971] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.215080] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.262666] env[70020]: DEBUG oslo_concurrency.lockutils [None req-53591ce8-7b34-49f0-a497-e14010dc2509 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.970s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.267191] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 30.848s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.267468] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.267778] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.267990] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.270373] env[70020]: INFO nova.compute.manager [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Terminating instance [ 727.299680] env[70020]: DEBUG nova.compute.manager [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 727.299965] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.301075] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0168784-0d0f-4e79-a63e-1d9141fa190d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.310633] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 727.310904] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e03bcd3-67cb-4eec-a533-c82102dbe688 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.328434] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "b99195a6-866e-4142-970a-42a0564889ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.328663] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "b99195a6-866e-4142-970a-42a0564889ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.377267] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 727.377424] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 727.377656] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleting the datastore file [datastore2] 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 727.377974] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6cc322e-7554-4bea-9ff7-b2bb119f00e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
727.385798] env[70020]: DEBUG oslo_vmware.api [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 727.385798] env[70020]: value = "task-3618025" [ 727.385798] env[70020]: _type = "Task" [ 727.385798] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.395528] env[70020]: DEBUG oslo_vmware.api [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.451142] env[70020]: DEBUG nova.compute.manager [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Received event network-changed-2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 727.451142] env[70020]: DEBUG nova.compute.manager [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Refreshing instance network info cache due to event network-changed-2944f964-96c9-42c8-8914-3737e1a4349a. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 727.451646] env[70020]: DEBUG oslo_concurrency.lockutils [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] Acquiring lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.451855] env[70020]: DEBUG oslo_concurrency.lockutils [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] Acquired lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.451991] env[70020]: DEBUG nova.network.neutron [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Refreshing network info cache for port 2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.515779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.516895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.554132] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-09d4577a-c6bf-422c-9c6c-a394f858f4c1 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "61bea079-9731-48d1-b472-b30226a0b5a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.836s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.582476] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.717591] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492615} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.717844] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 13f6daa5-d859-40ed-b1b0-edd7717b8df3/13f6daa5-d859-40ed-b1b0-edd7717b8df3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.718066] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.718321] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71a7c265-3993-4605-8462-a84013cd96a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.724522] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 727.724522] env[70020]: value = "task-3618026" [ 727.724522] env[70020]: _type = "Task" [ 727.724522] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.732618] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618026, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.777895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.778085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquired lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.778315] env[70020]: DEBUG nova.network.neutron [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.899506] env[70020]: DEBUG oslo_vmware.api [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.385436} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.899805] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 727.899980] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 727.900175] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.900483] env[70020]: INFO nova.compute.manager [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Took 0.60 seconds to destroy the instance on the hypervisor. [ 727.900755] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.900999] env[70020]: DEBUG nova.compute.manager [-] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 727.901111] env[70020]: DEBUG nova.network.neutron [-] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.068239] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.236893] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073235} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.237168] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.240509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c6eefb-6888-4194-a016-c11809feef0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.272753] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 13f6daa5-d859-40ed-b1b0-edd7717b8df3/13f6daa5-d859-40ed-b1b0-edd7717b8df3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.276037] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba87e3d-218c-45fe-8d4a-2002a57f0f18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.298135] env[70020]: DEBUG nova.compute.utils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Can not refresh info_cache because instance was not found {{(pid=70020) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 728.312918] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 728.312918] env[70020]: value = "task-3618027" [ 728.312918] env[70020]: _type = "Task" [ 728.312918] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.322374] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618027, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.333061] env[70020]: DEBUG nova.network.neutron [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.369315] env[70020]: DEBUG nova.network.neutron [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updated VIF entry in instance network info cache for port 2944f964-96c9-42c8-8914-3737e1a4349a. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 728.369674] env[70020]: DEBUG nova.network.neutron [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updating instance_info_cache with network_info: [{"id": "2944f964-96c9-42c8-8914-3737e1a4349a", "address": "fa:16:3e:c3:66:11", "network": {"id": "7e64c7c3-a66a-4854-8f1a-153495baabb6", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-192249615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4988050072ea4c81a69c636049df9e4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2944f964-96", "ovs_interfaceid": "2944f964-96c9-42c8-8914-3737e1a4349a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.375228] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662fae4f-d44e-4219-b2e1-71983a6e888e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.386837] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae7fa0f-11fe-493c-9c56-27f1547115ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.422059] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ee78e7-e530-40c9-9a8e-5eece47e78a6 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.430085] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff34ead-3ef9-4f24-8deb-0267e710b016 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.435731] env[70020]: DEBUG nova.network.neutron [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.446859] env[70020]: DEBUG nova.compute.provider_tree [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 728.564256] env[70020]: DEBUG nova.compute.manager [req-c68f5a23-3d73-46f8-826c-5e801fbb226e req-fde07cc7-cb68-4b73-9497-a51926f41266 service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Received event network-vif-deleted-dbdfc4ab-6655-403a-8fa4-9d2cd2e84728 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 728.564256] env[70020]: INFO nova.compute.manager [req-c68f5a23-3d73-46f8-826c-5e801fbb226e req-fde07cc7-cb68-4b73-9497-a51926f41266 service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Neutron deleted interface dbdfc4ab-6655-403a-8fa4-9d2cd2e84728; detaching it from the instance and deleting it from the info cache [ 728.564256] env[70020]: DEBUG nova.network.neutron [req-c68f5a23-3d73-46f8-826c-5e801fbb226e req-fde07cc7-cb68-4b73-9497-a51926f41266 service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.587699] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.668151] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "d601179a-df77-4f2e-b8df-9185b8a485e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.668151] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 
tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.668151] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "d601179a-df77-4f2e-b8df-9185b8a485e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.668151] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.668443] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.669785] env[70020]: INFO nova.compute.manager [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Terminating instance [ 728.794601] env[70020]: DEBUG nova.network.neutron [-] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.823548] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618027, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.873344] env[70020]: DEBUG oslo_concurrency.lockutils [req-2e22c215-891b-4966-bea3-2fbbc30405f9 req-968431b4-9c2f-4570-8636-391fc948536d service nova] Releasing lock "refresh_cache-b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.949529] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Releasing lock "refresh_cache-50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.950173] env[70020]: DEBUG nova.compute.manager [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 728.950497] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.953889] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a03cd54-bce9-4132-a62b-d264ca9790a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.963655] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11de5bd-d0cc-4e74-aa8e-3cc8e1f76077 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.974789] env[70020]: ERROR nova.scheduler.client.report [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [req-456110c4-7393-4df6-bd0b-b5351614f833] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-456110c4-7393-4df6-bd0b-b5351614f833"}]} [ 728.998761] env[70020]: WARNING nova.virt.vmwareapi.vmops [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb could not be found. 
[ 728.999061] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.999258] env[70020]: INFO nova.compute.manager [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 728.999659] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.000784] env[70020]: DEBUG nova.scheduler.client.report [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 729.003430] env[70020]: DEBUG nova.compute.manager [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.003524] env[70020]: DEBUG nova.network.neutron [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.019881] env[70020]: DEBUG nova.scheduler.client.report [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 729.020130] env[70020]: DEBUG nova.compute.provider_tree [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
729.023727] env[70020]: DEBUG nova.network.neutron [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.034273] env[70020]: DEBUG nova.scheduler.client.report [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 729.054533] env[70020]: DEBUG nova.scheduler.client.report [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 729.066984] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d968c63e-395f-4fde-aeba-01b82e2331d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.076319] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cff420f-f493-4226-8ea2-469eb956fadf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.113534] env[70020]: DEBUG nova.compute.manager [req-c68f5a23-3d73-46f8-826c-5e801fbb226e req-fde07cc7-cb68-4b73-9497-a51926f41266 service nova] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Detach interface failed, port_id=dbdfc4ab-6655-403a-8fa4-9d2cd2e84728, reason: Instance 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 729.175735] env[70020]: DEBUG nova.compute.manager [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 729.175735] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.176125] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004ee3d4-31ce-4ee5-8937-516c53fb1cfe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.185906] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 729.186200] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bd4a945-85d9-4b79-af05-5d2cf6ec07b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.193144] env[70020]: DEBUG oslo_vmware.api [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 729.193144] env[70020]: value = "task-3618028" [ 729.193144] env[70020]: _type = "Task" [ 729.193144] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.203767] env[70020]: DEBUG oslo_vmware.api [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3618028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.298899] env[70020]: INFO nova.compute.manager [-] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Took 1.40 seconds to deallocate network for instance. [ 729.324970] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618027, 'name': ReconfigVM_Task, 'duration_secs': 0.756676} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.328432] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 13f6daa5-d859-40ed-b1b0-edd7717b8df3/13f6daa5-d859-40ed-b1b0-edd7717b8df3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.329595] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a1654bd-ecea-4fd3-a538-08597c8df5da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.337219] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 729.337219] env[70020]: value = "task-3618029" [ 729.337219] env[70020]: _type = "Task" [ 729.337219] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.350611] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618029, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.516293] env[70020]: INFO nova.compute.manager [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Rebuilding instance [ 729.525572] env[70020]: DEBUG nova.network.neutron [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.553502] env[70020]: DEBUG nova.compute.manager [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 729.554394] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66322d95-4774-424f-a6e1-7ef19b01b1a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.592339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9cf85f-141d-443f-be4b-cdc19466edfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.600264] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfa8fd5-557f-42ac-8374-8b979b4c564d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.635628] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001490e5-4a61-4a00-bbed-65f1cde67370 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.643824] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8359c8cd-dbb2-48f4-ab52-2187d5cf9fa9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.660600] env[70020]: DEBUG nova.compute.provider_tree [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 729.703052] env[70020]: DEBUG oslo_vmware.api [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3618028, 'name': PowerOffVM_Task, 'duration_secs': 0.261714} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.703414] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.703625] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.703909] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5509cdb-8ae9-42e0-915c-d57c8468580d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.791013] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.791277] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.791458] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Deleting the 
datastore file [datastore2] d601179a-df77-4f2e-b8df-9185b8a485e3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.791862] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e275b1b2-53d6-44f4-871e-9fb9932fab91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.798575] env[70020]: DEBUG oslo_vmware.api [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for the task: (returnval){ [ 729.798575] env[70020]: value = "task-3618031" [ 729.798575] env[70020]: _type = "Task" [ 729.798575] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.805061] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.808281] env[70020]: DEBUG oslo_vmware.api [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3618031, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.846597] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618029, 'name': Rename_Task, 'duration_secs': 0.180792} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.846863] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.847112] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d296f81-4995-4f8d-87b9-f050f6972775 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.853271] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 729.853271] env[70020]: value = "task-3618032" [ 729.853271] env[70020]: _type = "Task" [ 729.853271] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.861043] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618032, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.030375] env[70020]: INFO nova.compute.manager [-] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Took 1.03 seconds to deallocate network for instance. [ 730.192097] env[70020]: DEBUG nova.scheduler.client.report [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 730.192479] env[70020]: DEBUG nova.compute.provider_tree [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 64 to 65 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 730.192710] env[70020]: DEBUG nova.compute.provider_tree [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 730.308310] env[70020]: DEBUG oslo_vmware.api [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Task: {'id': task-3618031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134484} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.308604] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 730.308817] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 730.308985] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.309163] env[70020]: INFO nova.compute.manager [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 730.310041] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.310041] env[70020]: DEBUG nova.compute.manager [-] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 730.310041] env[70020]: DEBUG nova.network.neutron [-] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.363491] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618032, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.536490] env[70020]: INFO nova.compute.manager [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance disappeared during terminate [ 730.536747] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cbe7600-35f6-4f3e-a56e-8a7a2adbc90b tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "50ce7a0c-aa80-4816-b84e-d8ff7b10fffb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.273s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.571032] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.571032] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9d5c4c8-13f2-4f1d-bebc-57f3ef25d25a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.579248] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 730.579248] env[70020]: value = "task-3618033" [ 730.579248] env[70020]: _type = "Task" [ 730.579248] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.587676] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618033, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.590203] env[70020]: DEBUG nova.compute.manager [req-c35806c5-abf4-406e-8c5c-020d828ba392 req-33181f05-7830-4442-82bc-96260f1c5b23 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Received event network-vif-deleted-4e709a63-45c3-48e8-8762-26e149c61266 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.590497] env[70020]: INFO nova.compute.manager [req-c35806c5-abf4-406e-8c5c-020d828ba392 req-33181f05-7830-4442-82bc-96260f1c5b23 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Neutron deleted interface 4e709a63-45c3-48e8-8762-26e149c61266; detaching it from the instance and deleting it from the info cache [ 730.590776] env[70020]: DEBUG nova.network.neutron [req-c35806c5-abf4-406e-8c5c-020d828ba392 req-33181f05-7830-4442-82bc-96260f1c5b23 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.697968] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.973s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.698565] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 730.702361] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.764s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.702607] env[70020]: DEBUG nova.objects.instance [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lazy-loading 'resources' on Instance uuid c4335d00-29a3-4f2e-b826-1a78ef02e0bf {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.864446] env[70020]: DEBUG oslo_vmware.api [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618032, 'name': PowerOnVM_Task, 'duration_secs': 0.995721} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.865514] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.865514] env[70020]: INFO nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Took 8.01 seconds to spawn the instance on the hypervisor. [ 730.865514] env[70020]: DEBUG nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.865915] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928512c5-353d-44d2-9989-c32dcbcd697c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.042440] env[70020]: DEBUG nova.network.neutron [-] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.088695] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618033, 'name': PowerOffVM_Task, 'duration_secs': 0.112141} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.089058] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.089732] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.090490] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c094e34-702a-493d-b0a0-c749429b3672 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.094632] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5691edcf-a10e-404b-aa77-4e63718f1f7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.098208] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.098701] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28cb20c8-a84c-433e-924c-d45adb5ffd41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.104588] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ee7e27-4986-4dc0-a62f-27488213d29c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.121545] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.121754] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.121930] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Deleting the datastore file [datastore1] 61bea079-9731-48d1-b472-b30226a0b5a1 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.122187] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b82870f-404a-4fc5-8ebc-9b8355ff59dc {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.135795] env[70020]: DEBUG nova.compute.manager [req-c35806c5-abf4-406e-8c5c-020d828ba392 req-33181f05-7830-4442-82bc-96260f1c5b23 service nova] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Detach interface failed, port_id=4e709a63-45c3-48e8-8762-26e149c61266, reason: Instance d601179a-df77-4f2e-b8df-9185b8a485e3 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 731.137351] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 731.137351] env[70020]: value = "task-3618035" [ 731.137351] env[70020]: _type = "Task" [ 731.137351] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.144029] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618035, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.206213] env[70020]: DEBUG nova.compute.utils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.211075] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 731.211075] env[70020]: DEBUG nova.network.neutron [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.271235] env[70020]: DEBUG nova.policy [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2a633598fe84159b970241c87588a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3b44270b4b74bdba8befc7bc5f55e52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 731.384713] env[70020]: INFO nova.compute.manager [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Took 39.69 seconds to build instance. 
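The inventory-update failures logged earlier in this section, and again a couple of entries below, are placement's generation-based concurrency control at work: the report client PUTs inventory together with the provider generation it last saw, receives a 409 "placement.concurrent_update" if another writer bumped the generation first, then refreshes inventories, aggregates and traits and retries. A rough sketch of that retry loop against the Placement HTTP API (request shapes assumed from the API reference; auth headers omitted):

import requests

def set_inventory(base_url, provider_uuid, inventories, generation, headers, retries=3):
    # PUT inventory with the generation we believe is current; on a 409
    # conflict, re-read the provider to learn the new generation and retry.
    for _ in range(retries):
        resp = requests.put(
            f"{base_url}/resource_providers/{provider_uuid}/inventories",
            json={"resource_provider_generation": generation,
                  "inventories": inventories},
            headers=headers)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()["resource_provider_generation"]
        # Conflict: another thread updated the provider first (as in the
        # req-456110c4... and req-d47fb691... errors in this log).
        refreshed = requests.get(
            f"{base_url}/resource_providers/{provider_uuid}", headers=headers)
        generation = refreshed.json()["generation"]
    raise RuntimeError("inventory update kept conflicting after retries")

In this log the retry eventually succeeds: the provider generation moves from 64 to 65 and the ProviderTree cache is updated to match.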
[ 731.545612] env[70020]: INFO nova.compute.manager [-] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Took 1.24 seconds to deallocate network for instance. [ 731.642936] env[70020]: DEBUG nova.network.neutron [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Successfully created port: acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.654260] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094026} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.654260] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.654260] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.654260] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.713825] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 731.749284] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00381fda-5f8e-473c-ab79-b63c7a484c30 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.757967] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9854e1-81bd-4c50-aded-484bb3fc2ad5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.791537] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3986cf55-04f0-4519-ae47-051f8dd226d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.798980] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4d846b-0e12-4340-ba85-b5401bc8cf70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.812565] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 731.887542] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55f57f7e-f086-4985-9b92-120bb80b0762 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.685s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.056105] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.334029] env[70020]: ERROR nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [req-d47fb691-3baf-47f5-a5c5-139f01c3860a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d47fb691-3baf-47f5-a5c5-139f01c3860a"}]} [ 732.352851] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 732.365157] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 732.365458] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 732.378437] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 732.389717] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.393766] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 732.697019] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.697019] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.697019] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.697221] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.697221] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.697221] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.697221] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.697221] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.697358] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.697358] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.697358] env[70020]: DEBUG nova.virt.hardware [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 732.697591] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f8f22c-5aba-4cbb-b081-e6d02a8aad4a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.706449] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfaa577-483d-4966-8548-b7cad23ccda1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.723548] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.729041] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.731604] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 732.733064] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 732.734887] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6749d958-21ff-4969-a2bb-b2e136475cba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.755973] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.755973] env[70020]: value = "task-3618036" [ 732.755973] env[70020]: _type = "Task" [ 732.755973] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.760973] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.761262] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.761421] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.761601] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.761742] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.761883] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.762103] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 
tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.762263] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.762422] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.762579] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.762749] env[70020]: DEBUG nova.virt.hardware [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 732.763507] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bfa8e4-6d34-4502-be69-fdf1180042f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.773266] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618036, 'name': CreateVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.776288] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b9a846-29e6-401b-b629-60fe361ff970 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.911217] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41611365-5b1f-4f49-828a-1bd6166a9a62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.919068] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220c9c45-69b5-4291-8944-0c68d5506e5f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.923452] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.953456] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac72454e-bfb0-4d04-b7d8-0da475a828dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.961133] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a9575e-df4a-4f68-95c7-1f7abf5d90f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.975493] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 733.270066] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618036, 'name': CreateVM_Task, 'duration_secs': 0.245271} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.270066] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 733.270066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.270066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.270066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 733.270066] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ff8f046-b9fc-4366-b8a5-55fb74ed2980 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.273663] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 733.273663] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52df39db-2f94-0b36-3fd6-76c5e0b27c1f" [ 733.273663] env[70020]: _type = "Task" [ 733.273663] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.278963] env[70020]: DEBUG nova.compute.manager [req-4c290f2d-835f-45ab-a8c5-08fc7a7c6f15 req-0bb8f501-faf9-4cc7-b8de-ce742ec5d58f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Received event network-vif-plugged-acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 733.279237] env[70020]: DEBUG oslo_concurrency.lockutils [req-4c290f2d-835f-45ab-a8c5-08fc7a7c6f15 req-0bb8f501-faf9-4cc7-b8de-ce742ec5d58f service nova] Acquiring lock "16c45b86-317a-4d0c-a402-51c85af37a5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.279463] env[70020]: DEBUG oslo_concurrency.lockutils [req-4c290f2d-835f-45ab-a8c5-08fc7a7c6f15 req-0bb8f501-faf9-4cc7-b8de-ce742ec5d58f service nova] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.279647] env[70020]: DEBUG oslo_concurrency.lockutils [req-4c290f2d-835f-45ab-a8c5-08fc7a7c6f15 req-0bb8f501-faf9-4cc7-b8de-ce742ec5d58f service nova] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.279815] env[70020]: DEBUG nova.compute.manager [req-4c290f2d-835f-45ab-a8c5-08fc7a7c6f15 req-0bb8f501-faf9-4cc7-b8de-ce742ec5d58f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] No waiting events found dispatching network-vif-plugged-acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 733.279976] env[70020]: WARNING nova.compute.manager [req-4c290f2d-835f-45ab-a8c5-08fc7a7c6f15 req-0bb8f501-faf9-4cc7-b8de-ce742ec5d58f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Received unexpected event network-vif-plugged-acad913d-4d60-4211-8b2f-e30f6f1d525c for instance with vm_state building and task_state spawning. [ 733.285689] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52df39db-2f94-0b36-3fd6-76c5e0b27c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.010148} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.285951] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.286178] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.286398] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.286748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.286748] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.286951] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65fd0f68-f100-476b-86cf-e34c5e39c2be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.294479] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.294698] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 733.295357] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdd40db6-c272-4bcc-9dfc-9307a08769f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.300701] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 733.300701] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5223475c-c5a9-eb03-af37-aad36fdf0c3d" [ 733.300701] env[70020]: _type = "Task" [ 733.300701] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.308447] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5223475c-c5a9-eb03-af37-aad36fdf0c3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.373174] env[70020]: DEBUG nova.network.neutron [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Successfully updated port: acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.425638] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "f7a42358-f26a-4651-a929-d3836f050648" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.425932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "f7a42358-f26a-4651-a929-d3836f050648" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.498078] env[70020]: ERROR nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] [req-1e0d65b9-6038-49fa-9f70-2399bee5a477] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1e0d65b9-6038-49fa-9f70-2399bee5a477"}]} [ 733.514054] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 733.526813] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 733.527060] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 733.539665] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 733.557543] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 733.811058] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5223475c-c5a9-eb03-af37-aad36fdf0c3d, 'name': SearchDatastore_Task, 'duration_secs': 0.008245} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.814211] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-932a1d70-e776-4e55-9151-1859281bfe0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.819404] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 733.819404] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b24df3-1a72-1cce-3e80-6da130b5a432" [ 733.819404] env[70020]: _type = "Task" [ 733.819404] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.829140] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b24df3-1a72-1cce-3e80-6da130b5a432, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.877188] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "refresh_cache-16c45b86-317a-4d0c-a402-51c85af37a5b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.877362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired lock "refresh_cache-16c45b86-317a-4d0c-a402-51c85af37a5b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.877492] env[70020]: DEBUG nova.network.neutron [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.030815] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464bc237-7498-4b77-8b17-1a3a32fb328a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.038692] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6e70a0-56ff-4ae9-a01a-a2849bd45152 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.081727] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424908f7-1b06-4c46-8c71-c0a470b9a73b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.090500] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6259efb-14dd-4992-b578-39341291993c {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.104456] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 734.330372] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b24df3-1a72-1cce-3e80-6da130b5a432, 'name': SearchDatastore_Task, 'duration_secs': 0.01038} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.330646] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.330910] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 734.331195] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-832a00c3-1720-4d4e-a944-c9504b1333ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.337363] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 734.337363] env[70020]: value = "task-3618037" [ 734.337363] env[70020]: _type = "Task" [ 734.337363] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.344963] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618037, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.413485] env[70020]: DEBUG nova.network.neutron [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.547524] env[70020]: DEBUG nova.network.neutron [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Updating instance_info_cache with network_info: [{"id": "acad913d-4d60-4211-8b2f-e30f6f1d525c", "address": "fa:16:3e:ff:db:65", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacad913d-4d", "ovs_interfaceid": "acad913d-4d60-4211-8b2f-e30f6f1d525c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.640040] env[70020]: DEBUG nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 734.640166] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 67 to 68 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 734.640314] env[70020]: DEBUG nova.compute.provider_tree [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 734.856138] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618037, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.050848] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Releasing lock "refresh_cache-16c45b86-317a-4d0c-a402-51c85af37a5b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.051298] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Instance network_info: |[{"id": "acad913d-4d60-4211-8b2f-e30f6f1d525c", "address": "fa:16:3e:ff:db:65", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacad913d-4d", "ovs_interfaceid": "acad913d-4d60-4211-8b2f-e30f6f1d525c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 735.051726] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:db:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acad913d-4d60-4211-8b2f-e30f6f1d525c', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.059985] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 
tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Creating folder: Project (d3b44270b4b74bdba8befc7bc5f55e52). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.060159] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ef04975-da8f-4c7f-9ce5-cea300d9c887 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.069431] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Created folder: Project (d3b44270b4b74bdba8befc7bc5f55e52) in parent group-v721521. [ 735.069599] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Creating folder: Instances. Parent ref: group-v721639. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.069848] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14191a10-cfc0-42ee-ac5a-b0420531cbf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.077678] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Created folder: Instances in parent group-v721639. [ 735.077890] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.078096] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.078309] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-584caea4-fb29-45e8-9186-fa19e6cbfef6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.095874] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.095874] env[70020]: value = "task-3618040" [ 735.095874] env[70020]: _type = "Task" [ 735.095874] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.105973] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618040, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.146139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.444s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.148787] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.906s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.149332] env[70020]: DEBUG nova.objects.instance [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lazy-loading 'resources' on Instance uuid 832a38c8-ed3a-460b-91bd-0138d2f2d03d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.167077] env[70020]: INFO nova.scheduler.client.report [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Deleted allocations for instance c4335d00-29a3-4f2e-b826-1a78ef02e0bf [ 735.322434] env[70020]: DEBUG nova.compute.manager [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Received event network-changed-acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.322434] env[70020]: DEBUG nova.compute.manager [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Refreshing instance network info cache due to event network-changed-acad913d-4d60-4211-8b2f-e30f6f1d525c. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 735.322434] env[70020]: DEBUG oslo_concurrency.lockutils [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] Acquiring lock "refresh_cache-16c45b86-317a-4d0c-a402-51c85af37a5b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.322434] env[70020]: DEBUG oslo_concurrency.lockutils [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] Acquired lock "refresh_cache-16c45b86-317a-4d0c-a402-51c85af37a5b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.322434] env[70020]: DEBUG nova.network.neutron [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Refreshing network info cache for port acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 735.348154] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618037, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718469} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.348426] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 735.348642] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 735.348894] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b3458c6-ddee-4922-b8a7-29d4ff2dbb18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.355601] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 735.355601] env[70020]: value = "task-3618041" [ 735.355601] env[70020]: _type = "Task" [ 735.355601] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.363241] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618041, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.605967] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618040, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.674528] env[70020]: DEBUG oslo_concurrency.lockutils [None req-647ebfca-d83c-4831-90c7-7844ede1aa7a tempest-ServerPasswordTestJSON-928370253 tempest-ServerPasswordTestJSON-928370253-project-member] Lock "c4335d00-29a3-4f2e-b826-1a78ef02e0bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.381s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.866474] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069261} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.868756] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.869949] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52312a60-10c4-41ac-ab7a-b5445c05b0db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.889796] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.892936] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7ca0eda-b045-46dc-ac84-91c70332d4c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.915625] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 735.915625] env[70020]: value = "task-3618042" [ 735.915625] env[70020]: _type = "Task" [ 735.915625] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.926560] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618042, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.042577] env[70020]: DEBUG nova.network.neutron [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Updated VIF entry in instance network info cache for port acad913d-4d60-4211-8b2f-e30f6f1d525c. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 736.042927] env[70020]: DEBUG nova.network.neutron [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Updating instance_info_cache with network_info: [{"id": "acad913d-4d60-4211-8b2f-e30f6f1d525c", "address": "fa:16:3e:ff:db:65", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacad913d-4d", "ovs_interfaceid": "acad913d-4d60-4211-8b2f-e30f6f1d525c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.105429] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618040, 'name': CreateVM_Task, 'duration_secs': 0.731007} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.105502] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 736.106820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.106820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.106820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 736.109453] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbc98a8c-a040-48c7-bfcb-b244e5540014 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.114590] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 736.114590] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ec91bd-50a1-7c7b-20eb-25362254c442" [ 736.114590] env[70020]: _type = "Task" [ 736.114590] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.122724] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ec91bd-50a1-7c7b-20eb-25362254c442, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.173369] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954d3ae6-8fd4-4b3f-b07b-89c915f26414 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.182546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14151c3a-bfa4-499f-94df-963a764bfc8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.216818] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b30e54-12f8-4ba3-9a14-4e7b46434f19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.224782] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ae559d-3956-43e3-a27f-5edc51c51af7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.238503] env[70020]: DEBUG nova.compute.provider_tree [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 736.426449] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618042, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.549838] env[70020]: DEBUG oslo_concurrency.lockutils [req-d65de541-55ee-4cac-a289-aebc849c3b94 req-0bdc0b85-b5b2-41ea-8a15-646957f2149f service nova] Releasing lock "refresh_cache-16c45b86-317a-4d0c-a402-51c85af37a5b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.628263] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ec91bd-50a1-7c7b-20eb-25362254c442, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.628263] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.628263] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 736.628263] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.628575] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.628575] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 736.628575] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35b3cf80-1863-4ecb-9a85-1ddbb82b8a79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.637366] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 736.637781] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 736.638623] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0b56321-e15a-440e-865d-afa60969945e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.644186] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 736.644186] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52939f5f-0ce6-2387-938b-ac1ee8889d04" [ 736.644186] env[70020]: _type = "Task" [ 736.644186] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.661241] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52939f5f-0ce6-2387-938b-ac1ee8889d04, 'name': SearchDatastore_Task, 'duration_secs': 0.008635} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.665144] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9c5849c-172e-420f-b82d-849ab0e53a07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.668701] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 736.668701] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52230260-bf1d-f0a7-ad25-7bb034f75886" [ 736.668701] env[70020]: _type = "Task" [ 736.668701] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.677562] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52230260-bf1d-f0a7-ad25-7bb034f75886, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.782339] env[70020]: DEBUG nova.scheduler.client.report [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 736.782623] env[70020]: DEBUG nova.compute.provider_tree [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 68 to 69 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 736.782803] env[70020]: DEBUG nova.compute.provider_tree [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 736.927390] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618042, 'name': ReconfigVM_Task, 'duration_secs': 0.987556} completed successfully. 
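The SearchDatastore_Task / ReconfigVM_Task records above follow oslo.vmware's invoke-then-poll pattern: the driver starts a server-side vCenter task, then blocks while the library polls TaskInfo, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming placeholder host/credentials and a VirtualMachine managed-object reference obtained elsewhere; exact constructor keywords can differ between oslo.vmware releases:

# Illustrative sketch only; host, credentials and vm_ref are placeholders.
from oslo_vmware import api as vmware_api

def rename_vm(session, vm_ref, new_name):
    # Start the server-side task (logged as "Invoking VirtualMachine.Rename_Task"),
    # then block while oslo.vmware polls TaskInfo and raises if the task errors.
    task = session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName=new_name)
    return session.wait_for_task(task)

# Session construction establishes a vCenter session; the last two positional
# arguments are the API retry count and the task poll interval in seconds.
session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)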
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.927600] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 61bea079-9731-48d1-b472-b30226a0b5a1/61bea079-9731-48d1-b472-b30226a0b5a1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.928794] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a074aa8b-dce7-4742-a251-4593c6bd4f8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.935188] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 736.935188] env[70020]: value = "task-3618043" [ 736.935188] env[70020]: _type = "Task" [ 736.935188] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.947025] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618043, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.182775] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52230260-bf1d-f0a7-ad25-7bb034f75886, 'name': SearchDatastore_Task, 'duration_secs': 0.009251} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.183163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.183816] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 16c45b86-317a-4d0c-a402-51c85af37a5b/16c45b86-317a-4d0c-a402-51c85af37a5b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.183873] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deb9f77b-eb4f-4301-a3e2-1ca2738aafc2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.191225] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 737.191225] env[70020]: value = "task-3618044" [ 737.191225] env[70020]: _type = "Task" [ 737.191225] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.199520] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618044, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.288339] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.291144] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.731s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.292569] env[70020]: INFO nova.compute.claims [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.316611] env[70020]: INFO nova.scheduler.client.report [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleted allocations for instance 832a38c8-ed3a-460b-91bd-0138d2f2d03d [ 737.447556] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618043, 'name': Rename_Task, 'duration_secs': 0.126774} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.447939] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.448208] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3a1e9a8-7630-48ae-a5c8-79bb3f39c3b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.456049] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 737.456049] env[70020]: value = "task-3618045" [ 737.456049] env[70020]: _type = "Task" [ 737.456049] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.467058] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618045, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.584322] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "301b30f6-9909-4fc9-8721-88a314e4edb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.584611] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.584850] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "301b30f6-9909-4fc9-8721-88a314e4edb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.585032] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.585233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.587619] env[70020]: INFO nova.compute.manager [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Terminating instance [ 737.704933] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618044, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485988} completed successfully. 
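The recurring "Acquiring lock ... / acquired ... waited N s / released ... held N s" records in this section come from oslo.concurrency's lockutils helpers, which nova wraps for locks such as "compute_resources" and the per-image datastore cache paths. A small sketch of the two forms involved, with illustrative lock names (nova's own wrappers add prefixes and semaphores on top of this):

# Illustrative sketch only; lock names are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the named lock held; entry and exit emit the
    # "acquired by ... waited" / "released ... held" DEBUG records.
    return instance_uuid

def fetch_cached_image(image_id):
    # Context-manager form, as used for the image-cache VMDK locks above.
    with lockutils.lock('devstack-image-cache_base/%s' % image_id):
        pass  # fetch the image or reuse the cached copy here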
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.705585] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 16c45b86-317a-4d0c-a402-51c85af37a5b/16c45b86-317a-4d0c-a402-51c85af37a5b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 737.705852] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.706124] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-258d13ca-27c2-482a-bb58-a1f1565e6337 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.714786] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 737.714786] env[70020]: value = "task-3618046" [ 737.714786] env[70020]: _type = "Task" [ 737.714786] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.723816] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.830123] env[70020]: DEBUG oslo_concurrency.lockutils [None req-deebd809-0e5e-497a-a8ad-ca9fe220b364 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "832a38c8-ed3a-460b-91bd-0138d2f2d03d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.103s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.968507] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618045, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.092095] env[70020]: DEBUG nova.compute.manager [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 738.092348] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.093249] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc90332-38bc-4344-a6e6-60c434e54dae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.100922] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.101223] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48041408-77d5-4a07-8d3c-78f3acf7d4bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.107399] env[70020]: DEBUG oslo_vmware.api [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 738.107399] env[70020]: value = "task-3618047" [ 738.107399] env[70020]: _type = "Task" [ 738.107399] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.115222] env[70020]: DEBUG oslo_vmware.api [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.226437] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104478} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.226542] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.227700] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb8e47c-8358-422c-9a15-d9cbfca73ad7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.256454] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 16c45b86-317a-4d0c-a402-51c85af37a5b/16c45b86-317a-4d0c-a402-51c85af37a5b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.256815] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a960dfa8-4f14-48fa-90b2-978a118f277e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.279652] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "f16d60a4-5f80-4f41-b994-068de48775ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.279909] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "f16d60a4-5f80-4f41-b994-068de48775ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.285289] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 738.285289] env[70020]: value = "task-3618048" [ 738.285289] env[70020]: _type = "Task" [ 738.285289] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.294584] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618048, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.467207] env[70020]: DEBUG oslo_vmware.api [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618045, 'name': PowerOnVM_Task, 'duration_secs': 0.889167} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.468152] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.468152] env[70020]: DEBUG nova.compute.manager [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.470870] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25acd28e-f295-4218-97bc-f7d48e774058 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.618561] env[70020]: DEBUG oslo_vmware.api [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618047, 'name': PowerOffVM_Task, 'duration_secs': 0.349724} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.621486] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.621943] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.622798] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd452e8e-befe-482c-850c-f578239e3c24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.744674] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.744974] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.745091] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Deleting the datastore file [datastore1] 301b30f6-9909-4fc9-8721-88a314e4edb4 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.745364] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e91ffbc-3b4c-4a24-9af6-f244fd19357d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.753234] env[70020]: DEBUG oslo_vmware.api [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 738.753234] env[70020]: value = "task-3618050" [ 738.753234] env[70020]: _type = "Task" [ 738.753234] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.764199] env[70020]: DEBUG oslo_vmware.api [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.797558] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618048, 'name': ReconfigVM_Task, 'duration_secs': 0.301217} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.797850] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 16c45b86-317a-4d0c-a402-51c85af37a5b/16c45b86-317a-4d0c-a402-51c85af37a5b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.798744] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bee6a69-d60c-4f51-9669-19d8bbc24ea9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.806146] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 738.806146] env[70020]: value = "task-3618051" [ 738.806146] env[70020]: _type = "Task" [ 738.806146] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.814905] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618051, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.900091] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b04df3a-96dc-4b0d-826e-d8637051e3a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.907888] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91849c7d-e66a-447e-a09b-277a8ab57078 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.939477] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ae5540-cf91-435b-adf0-6c8f9ee99d41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.947987] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bebba7-20d5-4420-b696-2844859f9d75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.961353] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 738.988931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.265672] env[70020]: DEBUG oslo_vmware.api [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148513} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.265989] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.266199] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.266379] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.266706] env[70020]: INFO nova.compute.manager [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 739.266782] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.266958] env[70020]: DEBUG nova.compute.manager [-] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 739.267065] env[70020]: DEBUG nova.network.neutron [-] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.316776] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618051, 'name': Rename_Task, 'duration_secs': 0.153957} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.317113] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 739.317427] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1acac82e-5b89-4bb1-8fa9-6eab4c065244 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.325559] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 739.325559] env[70020]: value = "task-3618052" [ 739.325559] env[70020]: _type = "Task" [ 739.325559] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.333675] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.490293] env[70020]: ERROR nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [req-94b54f92-1002-4016-9aae-a04914bbb084] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-94b54f92-1002-4016-9aae-a04914bbb084"}]} [ 739.510187] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 739.533287] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 739.533287] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 739.545274] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 739.564974] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 739.838007] env[70020]: DEBUG oslo_vmware.api [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618052, 'name': PowerOnVM_Task, 'duration_secs': 0.489647} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.838458] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.838680] env[70020]: INFO nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Took 7.11 seconds to spawn the instance on the hypervisor. [ 739.838900] env[70020]: DEBUG nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.842123] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8d734e-fe61-4cfa-97a5-64026dc4e472 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.885212] env[70020]: DEBUG nova.compute.manager [req-76d923a3-430c-415d-9546-c35145baff75 req-34ee41a6-af14-4fe9-8d47-cdf048cbc672 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Received event network-vif-deleted-31ef45d2-b59a-4c2c-9fdc-f17ae158e442 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.885432] env[70020]: INFO nova.compute.manager [req-76d923a3-430c-415d-9546-c35145baff75 req-34ee41a6-af14-4fe9-8d47-cdf048cbc672 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Neutron deleted interface 31ef45d2-b59a-4c2c-9fdc-f17ae158e442; detaching it from the instance and deleting it from the info cache [ 739.885600] env[70020]: DEBUG nova.network.neutron [req-76d923a3-430c-415d-9546-c35145baff75 req-34ee41a6-af14-4fe9-8d47-cdf048cbc672 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.086446] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.086676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.123561] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 
tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "61bea079-9731-48d1-b472-b30226a0b5a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.123842] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "61bea079-9731-48d1-b472-b30226a0b5a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.124071] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "61bea079-9731-48d1-b472-b30226a0b5a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.124444] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "61bea079-9731-48d1-b472-b30226a0b5a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.124655] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "61bea079-9731-48d1-b472-b30226a0b5a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.127528] env[70020]: INFO nova.compute.manager [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Terminating instance [ 740.161898] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02239de-39ab-4912-abdd-043e6f6f04a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.170129] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64ab8ef-4e12-43e2-95fe-c8527a46c16d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.203438] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8e667e-54df-42fa-8c6d-bace0aa684ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.210775] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da3b7fe-de08-4cee-a29b-54011b787ccf {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.231391] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 740.246920] env[70020]: DEBUG nova.network.neutron [-] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.360462] env[70020]: INFO nova.compute.manager [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Took 45.41 seconds to build instance. [ 740.391713] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f029e5a9-7d46-42d4-aa00-4289f53db231 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.401539] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25853a19-239c-412e-9633-5aae8befc970 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.433557] env[70020]: DEBUG nova.compute.manager [req-76d923a3-430c-415d-9546-c35145baff75 req-34ee41a6-af14-4fe9-8d47-cdf048cbc672 service nova] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Detach interface failed, port_id=31ef45d2-b59a-4c2c-9fdc-f17ae158e442, reason: Instance 301b30f6-9909-4fc9-8721-88a314e4edb4 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 740.640580] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "refresh_cache-61bea079-9731-48d1-b472-b30226a0b5a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.640580] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquired lock "refresh_cache-61bea079-9731-48d1-b472-b30226a0b5a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.640580] env[70020]: DEBUG nova.network.neutron [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.751721] env[70020]: INFO nova.compute.manager [-] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Took 1.48 seconds to deallocate network for instance. [ 740.757139] env[70020]: ERROR nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [req-169c0162-5520-4232-a8c3-5c9d307acfef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-169c0162-5520-4232-a8c3-5c9d307acfef"}]} [ 740.774727] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 740.797902] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 740.798380] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 740.809534] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 740.828354] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 740.866662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd1e2fe6-5dac-4ca3-8ca7-367c2b131aaa tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.929s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.162577] 
env[70020]: DEBUG nova.network.neutron [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.217731] env[70020]: DEBUG nova.network.neutron [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.265093] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.324202] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bd5fa6-d797-4097-9526-bff94069b1af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.331653] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68d39fc-e30b-4e3f-adc1-137c63ae012c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.361534] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60459f92-1238-43fc-94cf-40ddeac233c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.369153] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df28b90-4d28-49ac-920a-386697e86960 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.373757] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.385892] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.488938] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "38839949-c717-4f0b-97a7-108d87417b88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.489102] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "38839949-c717-4f0b-97a7-108d87417b88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.719926] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Releasing lock "refresh_cache-61bea079-9731-48d1-b472-b30226a0b5a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.720514] env[70020]: DEBUG nova.compute.manager [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 741.720791] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 741.721978] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1724b0-da02-41c9-bc8c-d612c8ed7028 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.729264] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 741.729503] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baef13b8-7924-4ff5-80cf-dd0f73f716e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.735216] env[70020]: DEBUG oslo_vmware.api [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 741.735216] env[70020]: value = "task-3618053" [ 741.735216] env[70020]: _type = "Task" [ 741.735216] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.742301] env[70020]: DEBUG oslo_vmware.api [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.895382] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.906884] env[70020]: ERROR nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [req-d1df02f1-9944-44ee-9901-04a9ed695b7f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d1df02f1-9944-44ee-9901-04a9ed695b7f"}]} [ 741.922245] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 741.934787] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 741.934988] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.945750] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 741.962879] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 742.245033] env[70020]: DEBUG oslo_vmware.api [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618053, 'name': PowerOffVM_Task, 'duration_secs': 0.159881} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.247419] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 742.247594] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.248014] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8be2dd80-8eec-4771-88ba-3904e592ba2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.271981] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 742.272234] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 742.272417] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Deleting the datastore file [datastore2] 61bea079-9731-48d1-b472-b30226a0b5a1 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.272668] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50cb021e-d3e7-41de-a6e3-d04492242e7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.280573] env[70020]: DEBUG oslo_vmware.api [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for the task: (returnval){ [ 742.280573] env[70020]: value = "task-3618055" [ 742.280573] env[70020]: _type = "Task" [ 742.280573] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.290472] env[70020]: DEBUG oslo_vmware.api [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618055, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.438914] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48208e0b-ce2c-492b-9a07-f52038853457 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.446255] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39703d1c-9217-4190-8200-5aa6863fa55c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.477472] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd39790-dd18-4bdb-893e-dcda0e94b568 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.485164] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb25175-ee70-48ab-af27-cbc157892acd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.498613] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 742.790684] env[70020]: DEBUG oslo_vmware.api [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Task: {'id': task-3618055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092292} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.790907] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.791141] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.791314] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.791487] env[70020]: INFO nova.compute.manager [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Took 1.07 seconds to destroy the instance on the hypervisor. [ 742.791772] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.791919] env[70020]: DEBUG nova.compute.manager [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 742.792022] env[70020]: DEBUG nova.network.neutron [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.807623] env[70020]: DEBUG nova.network.neutron [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.037894] env[70020]: DEBUG nova.scheduler.client.report [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 743.038211] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 72 to 73 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 743.038349] env[70020]: DEBUG nova.compute.provider_tree [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 743.312226] env[70020]: DEBUG nova.network.neutron [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.545014] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.252s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.545014] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.546790] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.368s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.547695] env[70020]: DEBUG nova.objects.instance [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lazy-loading 'resources' on Instance uuid f53cb08c-0939-4cb1-8476-8b289d6a1b05 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 743.813844] env[70020]: INFO nova.compute.manager [-] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Took 1.02 seconds to deallocate network for instance. [ 744.054309] env[70020]: DEBUG nova.compute.utils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.056134] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.056134] env[70020]: DEBUG nova.network.neutron [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.100738] env[70020]: DEBUG nova.policy [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '215c9a4a63f149b580991b6aa5ac2f45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df815d08ee8e4aae9d880cdb980c2ad5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.320643] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.387735] env[70020]: DEBUG nova.network.neutron [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Successfully created port: 271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 744.551278] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9768a8-c30f-4507-9b7a-372653ca2dcd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.558584] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.562069] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c6f5d1-8ece-4fbb-9253-f0bb1757634e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.593680] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0730fa-e385-4056-866e-15dd09d4d9ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.605052] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a277fa62-d085-4ac3-8ea4-be0b2bb20531 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.620336] env[70020]: DEBUG nova.compute.provider_tree [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.122888] env[70020]: DEBUG nova.scheduler.client.report [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.573924] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.599974] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.600931] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.600931] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.600931] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.600931] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.601234] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.601351] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.601507] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.601666] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 
tempest-ServersTestJSON-2095302437-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.601820] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.601991] env[70020]: DEBUG nova.virt.hardware [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.602905] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300d1457-3e67-4b76-ac69-f4593a0e228b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.611238] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e711ce1a-a151-4052-a188-962b617e83cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.627966] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.630155] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.224s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.631062] env[70020]: DEBUG nova.objects.instance [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lazy-loading 'resources' on Instance uuid d0756709-f17b-441e-b537-df937cfbde84 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.670479] env[70020]: INFO nova.scheduler.client.report [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Deleted allocations for instance f53cb08c-0939-4cb1-8476-8b289d6a1b05 [ 745.850198] env[70020]: DEBUG nova.compute.manager [req-f1c57a12-55fe-4643-b4af-e709e781e835 req-7e1ac6e8-2c32-4868-b98c-14cc921331b1 service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Received event network-vif-plugged-271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 745.850198] env[70020]: DEBUG oslo_concurrency.lockutils [req-f1c57a12-55fe-4643-b4af-e709e781e835 req-7e1ac6e8-2c32-4868-b98c-14cc921331b1 service nova] Acquiring lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.850198] env[70020]: DEBUG oslo_concurrency.lockutils [req-f1c57a12-55fe-4643-b4af-e709e781e835 req-7e1ac6e8-2c32-4868-b98c-14cc921331b1 service nova] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.850594] env[70020]: DEBUG oslo_concurrency.lockutils [req-f1c57a12-55fe-4643-b4af-e709e781e835 req-7e1ac6e8-2c32-4868-b98c-14cc921331b1 service nova] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.850939] env[70020]: DEBUG nova.compute.manager [req-f1c57a12-55fe-4643-b4af-e709e781e835 req-7e1ac6e8-2c32-4868-b98c-14cc921331b1 service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] No waiting events found dispatching network-vif-plugged-271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.851285] env[70020]: WARNING nova.compute.manager [req-f1c57a12-55fe-4643-b4af-e709e781e835 req-7e1ac6e8-2c32-4868-b98c-14cc921331b1 service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Received unexpected event network-vif-plugged-271f324e-5244-40df-9393-7b0a123839bb for instance with vm_state building and task_state spawning. [ 745.994807] env[70020]: DEBUG nova.network.neutron [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Successfully updated port: 271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.179954] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3578fc1-7c65-499b-8859-80b834f3d8a9 tempest-ServerMetadataNegativeTestJSON-1247764260 tempest-ServerMetadataNegativeTestJSON-1247764260-project-member] Lock "f53cb08c-0939-4cb1-8476-8b289d6a1b05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.421s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.497231] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.497435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquired lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.497591] env[70020]: DEBUG nova.network.neutron [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Building network info 
cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.646055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6887e6-cb94-4a69-815c-f77101f467ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.656153] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ac15f1-5f1d-45e1-8712-57a68899a1bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.693989] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7494f9-5e5e-4094-99b9-60cb14ac82f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.702056] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1eb6ab4-16a8-4afd-9e94-0d3a51bd609a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.716206] env[70020]: DEBUG nova.compute.provider_tree [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.047795] env[70020]: DEBUG nova.network.neutron [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.221248] env[70020]: DEBUG nova.scheduler.client.report [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.295246] env[70020]: DEBUG nova.network.neutron [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Updating instance_info_cache with network_info: [{"id": "271f324e-5244-40df-9393-7b0a123839bb", "address": "fa:16:3e:82:d3:b3", "network": {"id": "a3a15f19-9e39-4316-b45e-431cab365a7a", "bridge": "br-int", "label": "tempest-ServersTestJSON-390220531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df815d08ee8e4aae9d880cdb980c2ad5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271f324e-52", "ovs_interfaceid": "271f324e-5244-40df-9393-7b0a123839bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.728801] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.732190] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 41.893s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.758484] env[70020]: INFO nova.scheduler.client.report [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Deleted allocations for instance d0756709-f17b-441e-b537-df937cfbde84 [ 747.802966] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 
tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Releasing lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.802966] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Instance network_info: |[{"id": "271f324e-5244-40df-9393-7b0a123839bb", "address": "fa:16:3e:82:d3:b3", "network": {"id": "a3a15f19-9e39-4316-b45e-431cab365a7a", "bridge": "br-int", "label": "tempest-ServersTestJSON-390220531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df815d08ee8e4aae9d880cdb980c2ad5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271f324e-52", "ovs_interfaceid": "271f324e-5244-40df-9393-7b0a123839bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.803126] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:d3:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '271f324e-5244-40df-9393-7b0a123839bb', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.812087] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Creating folder: Project (df815d08ee8e4aae9d880cdb980c2ad5). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.812897] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51c97737-df3a-4634-9c02-046d557a4bc0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.826615] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Created folder: Project (df815d08ee8e4aae9d880cdb980c2ad5) in parent group-v721521. [ 747.826885] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Creating folder: Instances. 
Parent ref: group-v721642. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.827207] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f7da2e3-9efd-4ad6-a914-edbfb619abbc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.841661] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Created folder: Instances in parent group-v721642. [ 747.841952] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.842258] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.842548] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ed4c955-3791-4bda-816b-6c23c0b43c15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.863036] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.863036] env[70020]: value = "task-3618058" [ 747.863036] env[70020]: _type = "Task" [ 747.863036] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.870521] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618058, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.925784] env[70020]: DEBUG nova.compute.manager [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Received event network-changed-271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.925784] env[70020]: DEBUG nova.compute.manager [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Refreshing instance network info cache due to event network-changed-271f324e-5244-40df-9393-7b0a123839bb. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 747.925784] env[70020]: DEBUG oslo_concurrency.lockutils [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] Acquiring lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.925784] env[70020]: DEBUG oslo_concurrency.lockutils [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] Acquired lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.925784] env[70020]: DEBUG nova.network.neutron [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Refreshing network info cache for port 271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.270018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-92ab9a43-2fd6-4b4b-8ea7-7f52f4db0033 tempest-ImagesOneServerTestJSON-841575098 tempest-ImagesOneServerTestJSON-841575098-project-member] Lock "d0756709-f17b-441e-b537-df937cfbde84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.428s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.373105] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618058, 'name': CreateVM_Task, 'duration_secs': 0.394024} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.374112] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.374112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.374112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.374469] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 748.374732] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3421f48-e2df-4102-9f84-2bf84ad85103 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.379227] env[70020]: DEBUG oslo_vmware.api [None 
req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 748.379227] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52feb10a-aed8-8618-52a1-ced3e90470a8" [ 748.379227] env[70020]: _type = "Task" [ 748.379227] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.387443] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52feb10a-aed8-8618-52a1-ced3e90470a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.748129] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance 4b5750d4-98ec-4c70-b214-fad97060b606 as it has an incoming, in-progress migration 2d171af4-44c5-498a-a2f3-345479067b8a. Migration status is confirming {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 748.749822] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating resource usage from migration 2d171af4-44c5-498a-a2f3-345479067b8a [ 748.781088] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.781335] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance bb4e4986-af2a-4832-9ec7-777bca863dce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.781629] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d601179a-df77-4f2e-b8df-9185b8a485e3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.781770] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ea97f6ab-057e-44d3-835a-68b46d241621 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.781917] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance bc57657e-99e8-46b8-9731-ddd4864a3114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.782037] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 08ce6bc8-30fe-4c63-80e1-26c84ae75702 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.782153] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 3501a6fc-f090-4098-8f63-57a97bd61f1b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.782277] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 301b30f6-9909-4fc9-8721-88a314e4edb4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.782391] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 0caa6acd-29d4-43ee-8b32-5149462dfc1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.782496] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.782614] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8adadb2e-2a20-45b1-bed8-34e09df25f39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.782726] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration 2d171af4-44c5-498a-a2f3-345479067b8a is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 748.782825] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 4b5750d4-98ec-4c70-b214-fad97060b606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.782948] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance a09db142-60d1-4a62-8e76-1e2e3676124f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.783589] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 19036f6f-2ee3-4ea5-82fa-b510bf903922 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.783758] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 6c36df58-3ab3-4595-b89c-9ab5a4664eec is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.783954] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.784114] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 61bea079-9731-48d1-b472-b30226a0b5a1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 748.784239] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.784357] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 16c45b86-317a-4d0c-a402-51c85af37a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.784471] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.890044] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52feb10a-aed8-8618-52a1-ced3e90470a8, 'name': SearchDatastore_Task, 'duration_secs': 0.011124} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.890360] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.890593] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.890855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.891059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.891290] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.891616] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91c6bf30-453e-4b44-b154-e34831dcbef7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.900128] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.900352] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.901095] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19b4392c-2ddf-4865-bc83-45c876e02f09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.906765] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 748.906765] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a0b3e9-ddee-e39f-c80c-91b96a9de8ff" [ 748.906765] env[70020]: _type = "Task" [ 748.906765] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.915029] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a0b3e9-ddee-e39f-c80c-91b96a9de8ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.980825] env[70020]: DEBUG nova.network.neutron [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Updated VIF entry in instance network info cache for port 271f324e-5244-40df-9393-7b0a123839bb. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 748.981057] env[70020]: DEBUG nova.network.neutron [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Updating instance_info_cache with network_info: [{"id": "271f324e-5244-40df-9393-7b0a123839bb", "address": "fa:16:3e:82:d3:b3", "network": {"id": "a3a15f19-9e39-4316-b45e-431cab365a7a", "bridge": "br-int", "label": "tempest-ServersTestJSON-390220531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df815d08ee8e4aae9d880cdb980c2ad5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271f324e-52", "ovs_interfaceid": "271f324e-5244-40df-9393-7b0a123839bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.287703] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 738d52c6-0368-434f-a14f-05b47ca865e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.419820] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a0b3e9-ddee-e39f-c80c-91b96a9de8ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010661} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.423010] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5a327f1-a3ad-4821-b70f-d9d16fca5d0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.428204] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 749.428204] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523ceb9c-1b20-38f9-55ba-ab4896c49350" [ 749.428204] env[70020]: _type = "Task" [ 749.428204] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.436369] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523ceb9c-1b20-38f9-55ba-ab4896c49350, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.484216] env[70020]: DEBUG oslo_concurrency.lockutils [req-3be2f0b6-ce76-460e-bde5-9595aa8d0c79 req-95307465-3d14-4655-bd20-29d02822751b service nova] Releasing lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.791519] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.945543] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523ceb9c-1b20-38f9-55ba-ab4896c49350, 'name': SearchDatastore_Task, 'duration_secs': 0.009253} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.945820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.946092] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3/8f7e4e69-0796-469f-8a2b-4e19fbf15ed3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.946361] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7878b43c-f302-44c9-ac80-4f94743e51b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.952925] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 749.952925] env[70020]: value = "task-3618059" [ 749.952925] env[70020]: _type = "Task" [ 749.952925] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.961891] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.303414] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ae91adc5-b3a4-4503-91f2-d803eaefedc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.463045] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457126} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.463673] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3/8f7e4e69-0796-469f-8a2b-4e19fbf15ed3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.463929] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.464218] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03d8aa20-f2c9-491d-8a92-3692a0e22b4e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.471900] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 750.471900] env[70020]: value = "task-3618060" [ 750.471900] env[70020]: _type = "Task" [ 750.471900] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.479497] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618060, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.810033] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 0add6226-3b90-4991-8f2b-81c35e72a7df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.983217] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618060, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063733} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.983217] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.983421] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2b230c-ad51-41aa-a3de-1903ce85191a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.008876] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3/8f7e4e69-0796-469f-8a2b-4e19fbf15ed3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.009351] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15e2ad4a-99ac-450c-a163-43f908d495c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.029848] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 751.029848] env[70020]: value = "task-3618061" [ 751.029848] env[70020]: _type = "Task" [ 751.029848] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.039900] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618061, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.118143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.118362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.313514] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c9ce57f3-f9a2-40aa-b7eb-403840c34304 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.540584] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618061, 'name': ReconfigVM_Task, 'duration_secs': 0.285664} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.540873] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3/8f7e4e69-0796-469f-8a2b-4e19fbf15ed3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.541649] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35e7a636-0a61-4b74-83c3-af7add10573c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.548615] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 751.548615] env[70020]: value = "task-3618062" [ 751.548615] env[70020]: _type = "Task" [ 751.548615] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.557081] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618062, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.621997] env[70020]: DEBUG nova.compute.utils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 751.818108] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c08166c5-2c31-4d40-a61c-c541924eb49c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.060910] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618062, 'name': Rename_Task, 'duration_secs': 0.139981} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.062039] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.062039] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4471517b-cbc1-4b81-970f-939a4149653a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.071464] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 752.071464] env[70020]: value = "task-3618063" [ 752.071464] env[70020]: _type = "Task" [ 752.071464] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.080449] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.124934] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.323099] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 55c20886-ae10-4326-a9de-f8577f320a99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.580265] env[70020]: DEBUG oslo_vmware.api [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618063, 'name': PowerOnVM_Task, 'duration_secs': 0.440559} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.580527] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.580724] env[70020]: INFO nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Took 7.01 seconds to spawn the instance on the hypervisor. 
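The repeated "Waiting for the task ... to complete" / "progress is 0%" / "completed successfully ... duration_secs" triplets above are oslo.vmware's task polling around each vSphere call in the spawn path (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): the driver submits the task, then blocks while the session polls the task's state until vCenter reports success or error, logging the elapsed duration on completion. A minimal sketch of that polling loop, written against a hypothetical fetch_task_info() helper rather than the real oslo.vmware/suds plumbing, is:

import time

# Hypothetical stand-in for reading a task's TaskInfo from vCenter; the real
# driver retrieves it through the suds client wrapped by oslo_vmware.service.
def fetch_task_info(task_ref):
    raise NotImplementedError("replace with a PropertyCollector read of the task's info")

def wait_for_task(task_ref, poll_interval=0.5):
    # Poll a vSphere task until it finishes, mirroring the log's
    # "progress is N%" and "completed successfully ... duration_secs" messages.
    start = time.time()
    while True:
        info = fetch_task_info(task_ref)  # expected keys: state, progress, result, error
        if info["state"] == "success":
            print("Task %s completed successfully, duration_secs=%.6f"
                  % (task_ref, time.time() - start))
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get("error")))
        print("Task %s progress is %s%%" % (task_ref, info.get("progress") or 0))
        time.sleep(poll_interval)

oslo.vmware drives essentially the same loop from a fixed-interval looping call rather than time.sleep, but the control flow visible in the log is the same: submit the task, poll at a fixed interval, return the result or raise on error.
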
[ 752.580975] env[70020]: DEBUG nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.581764] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da28dcf-6901-4b8e-baed-3d8f8befd0c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.830852] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c56279e2-0fc6-4546-854c-82e5fda0e7a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.099031] env[70020]: INFO nova.compute.manager [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Took 53.55 seconds to build instance. [ 753.215797] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.216080] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.216336] env[70020]: INFO nova.compute.manager [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Attaching volume d8414132-451b-4d65-a184-bdc8c5deb6c9 to /dev/sdb [ 753.253108] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b1a447-e788-46bc-8f0a-acdcfe9b2b24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.259993] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a328cb85-2920-42f2-99bd-a7552f7007a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.272916] env[70020]: DEBUG nova.virt.block_device [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating existing volume attachment record: 66fa7bf1-0d16-4370-9595-47dc6929b04e {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 753.334200] env[70020]: DEBUG 
nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 48efbd17-ff4e-426a-a135-f43cae8c97d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.601442] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c463a3b-834a-4a74-8c96-1cf05f726c07 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.087s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.840253] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 2198e7f8-5458-4b97-abb3-0a3c932cebc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.107184] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 754.341771] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 36f15b0a-d57f-49d8-9510-1036e889a438 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.345095] env[70020]: DEBUG nova.compute.manager [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Received event network-changed-271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.345283] env[70020]: DEBUG nova.compute.manager [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Refreshing instance network info cache due to event network-changed-271f324e-5244-40df-9393-7b0a123839bb. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 754.345493] env[70020]: DEBUG oslo_concurrency.lockutils [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] Acquiring lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.345632] env[70020]: DEBUG oslo_concurrency.lockutils [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] Acquired lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.346453] env[70020]: DEBUG nova.network.neutron [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Refreshing network info cache for port 271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.633342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.848716] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b99195a6-866e-4142-970a-42a0564889ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.094230] env[70020]: DEBUG nova.network.neutron [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Updated VIF entry in instance network info cache for port 271f324e-5244-40df-9393-7b0a123839bb. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.094579] env[70020]: DEBUG nova.network.neutron [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Updating instance_info_cache with network_info: [{"id": "271f324e-5244-40df-9393-7b0a123839bb", "address": "fa:16:3e:82:d3:b3", "network": {"id": "a3a15f19-9e39-4316-b45e-431cab365a7a", "bridge": "br-int", "label": "tempest-ServersTestJSON-390220531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df815d08ee8e4aae9d880cdb980c2ad5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap271f324e-52", "ovs_interfaceid": "271f324e-5244-40df-9393-7b0a123839bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.353030] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f56e88f6-3a25-44d9-bdb1-cc4291169c9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.445467] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.445690] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.599874] env[70020]: DEBUG oslo_concurrency.lockutils [req-e0311f32-52f9-4029-a1b3-376b281a42e5 req-3d72a36b-1b24-4625-bc8d-7272585e8fac service nova] Releasing lock "refresh_cache-8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.856289] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f7a42358-f26a-4651-a929-d3836f050648 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.358848] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f16d60a4-5f80-4f41-b994-068de48775ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.863289] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 29d41731-4ae2-4cc4-bfda-b7356922c8ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.366585] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 38839949-c717-4f0b-97a7-108d87417b88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.366948] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 757.367245] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 757.783226] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21f0cfe-90bd-4772-88c5-8eccd67da94d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.790924] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda5efd2-9c5e-47c2-8bfd-576ae04cbd06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.821375] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c74563-9851-481b-aef4-f1d468ffe32e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.828483] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac80af3c-c8c6-4f0d-b9e6-4310740e362d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.834679] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 757.834889] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721646', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'name': 'volume-d8414132-451b-4d65-a184-bdc8c5deb6c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '08ce6bc8-30fe-4c63-80e1-26c84ae75702', 'attached_at': '', 'detached_at': '', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'serial': 'd8414132-451b-4d65-a184-bdc8c5deb6c9'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 757.842872] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cfe6b2-de54-41eb-925b-cc1842cbc8a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.845277] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.859362] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e85635e-2784-49d7-b738-5a6a80aa5538 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.883935] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] volume-d8414132-451b-4d65-a184-bdc8c5deb6c9/volume-d8414132-451b-4d65-a184-bdc8c5deb6c9.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.884243] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47890378-9fe7-4830-81f2-bee146b96c52 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.902242] env[70020]: DEBUG oslo_vmware.api [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 757.902242] env[70020]: value = "task-3618068" [ 757.902242] env[70020]: _type = "Task" [ 757.902242] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.909615] env[70020]: DEBUG oslo_vmware.api [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.349045] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.412345] env[70020]: DEBUG oslo_vmware.api [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618068, 'name': ReconfigVM_Task, 'duration_secs': 0.364669} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.412658] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Reconfigured VM instance instance-0000000f to attach disk [datastore1] volume-d8414132-451b-4d65-a184-bdc8c5deb6c9/volume-d8414132-451b-4d65-a184-bdc8c5deb6c9.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.417280] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de3dfc44-e205-4d9c-8121-40f7fa969a19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.431442] env[70020]: DEBUG oslo_vmware.api [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 758.431442] env[70020]: value = "task-3618069" [ 758.431442] env[70020]: _type = "Task" [ 758.431442] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.439087] env[70020]: DEBUG oslo_vmware.api [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618069, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.854446] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 758.854733] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.123s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.855028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.387s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.856533] env[70020]: INFO nova.compute.claims [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.941548] env[70020]: DEBUG oslo_vmware.api [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618069, 'name': ReconfigVM_Task, 'duration_secs': 0.133288} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.941873] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721646', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'name': 'volume-d8414132-451b-4d65-a184-bdc8c5deb6c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '08ce6bc8-30fe-4c63-80e1-26c84ae75702', 'attached_at': '', 'detached_at': '', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'serial': 'd8414132-451b-4d65-a184-bdc8c5deb6c9'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 759.984628] env[70020]: DEBUG nova.objects.instance [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.341644] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a139b9-98a2-411b-8f39-d000d3e9c42c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.351184] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac20558-c5e1-4330-92ec-a074efbf483b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.384601] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11061941-0692-43ba-8ed8-f87088271764 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.392206] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b366d0-cd49-478f-b11d-9c2c46955c58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.405783] env[70020]: DEBUG nova.compute.provider_tree [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.492884] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e5af27e0-f7d8-453a-9366-747f68a36a43 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.276s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.603104] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.603388] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.603621] env[70020]: DEBUG nova.compute.manager [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.604571] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebfb40f-dcbf-41e7-98e9-9e7a78a13267 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.611158] env[70020]: DEBUG nova.compute.manager [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 760.611770] env[70020]: DEBUG nova.objects.instance [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.909279] env[70020]: DEBUG nova.scheduler.client.report [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.414488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.415027] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 761.417602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 53.444s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.419079] env[70020]: INFO nova.compute.claims [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.618216] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.618625] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cccf6e4-13a2-4780-ab0d-2bdc500676b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.626645] env[70020]: DEBUG oslo_vmware.api [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 761.626645] env[70020]: value = "task-3618070" [ 761.626645] env[70020]: _type = "Task" [ 761.626645] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.635746] env[70020]: DEBUG oslo_vmware.api [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.924060] env[70020]: DEBUG nova.compute.utils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.928503] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.928503] env[70020]: DEBUG nova.network.neutron [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.976558] env[70020]: DEBUG nova.policy [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a50b41a53354ffc9b89ea3d769590d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f81ac3e65f9042f4bcf818cd216a32eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 762.137272] env[70020]: DEBUG oslo_vmware.api [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618070, 'name': PowerOffVM_Task, 'duration_secs': 0.22798} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.137531] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.137723] env[70020]: DEBUG nova.compute.manager [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.138521] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7fb273-c085-4258-8ad1-199787e4c45f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.229356] env[70020]: DEBUG nova.network.neutron [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Successfully created port: c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.427464] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 762.652882] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2c96a538-b434-4bd7-9231-305e70a7a6b4 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.940472] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78398ed-a5ea-44b5-9491-2c26ba585867 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.951225] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8331af-6d5f-4d99-aaf9-3d85bc811f4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.983742] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fafef70-a24f-4418-a181-5f814a48a12d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.992531] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c8a625-3655-4d8c-94bb-e916caf8b2ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.007779] env[70020]: DEBUG nova.compute.provider_tree [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.126257] env[70020]: DEBUG nova.objects.instance [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.446605] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 763.481014] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.481324] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.481496] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.481683] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.481834] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.481980] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.482208] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.482366] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.482549] env[70020]: DEBUG 
nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.482692] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.482856] env[70020]: DEBUG nova.virt.hardware [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.483730] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e84eaa-b5fd-415c-a874-54f1bce5cd50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.492566] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04234fae-1cc9-4756-aefa-325c7e23a2eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.511822] env[70020]: DEBUG nova.scheduler.client.report [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.628020] env[70020]: DEBUG nova.compute.manager [req-786deae1-00bf-4b6b-8e61-0b2f42c96154 req-c33dedd1-bb4a-4965-bb6b-34a2b15e1251 service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Received event network-vif-plugged-c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 763.628464] env[70020]: DEBUG oslo_concurrency.lockutils [req-786deae1-00bf-4b6b-8e61-0b2f42c96154 req-c33dedd1-bb4a-4965-bb6b-34a2b15e1251 service nova] Acquiring lock "738d52c6-0368-434f-a14f-05b47ca865e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.628776] env[70020]: DEBUG oslo_concurrency.lockutils [req-786deae1-00bf-4b6b-8e61-0b2f42c96154 req-c33dedd1-bb4a-4965-bb6b-34a2b15e1251 service nova] Lock "738d52c6-0368-434f-a14f-05b47ca865e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.628980] env[70020]: DEBUG oslo_concurrency.lockutils 
[req-786deae1-00bf-4b6b-8e61-0b2f42c96154 req-c33dedd1-bb4a-4965-bb6b-34a2b15e1251 service nova] Lock "738d52c6-0368-434f-a14f-05b47ca865e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.629191] env[70020]: DEBUG nova.compute.manager [req-786deae1-00bf-4b6b-8e61-0b2f42c96154 req-c33dedd1-bb4a-4965-bb6b-34a2b15e1251 service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] No waiting events found dispatching network-vif-plugged-c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 763.629379] env[70020]: WARNING nova.compute.manager [req-786deae1-00bf-4b6b-8e61-0b2f42c96154 req-c33dedd1-bb4a-4965-bb6b-34a2b15e1251 service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Received unexpected event network-vif-plugged-c2d9bed7-29b7-41c5-9e01-b47d54359ea0 for instance with vm_state building and task_state spawning. [ 763.633230] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.633533] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.634065] env[70020]: DEBUG nova.network.neutron [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.634218] env[70020]: DEBUG nova.objects.instance [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'info_cache' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.705135] env[70020]: DEBUG nova.network.neutron [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Successfully updated port: c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.017845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.018374] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 
tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 764.021561] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 55.682s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.021799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.024028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 54.769s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.053890] env[70020]: INFO nova.scheduler.client.report [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Deleted allocations for instance 3501a6fc-f090-4098-8f63-57a97bd61f1b [ 764.137904] env[70020]: DEBUG nova.objects.base [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Object Instance<08ce6bc8-30fe-4c63-80e1-26c84ae75702> lazy-loaded attributes: flavor,info_cache {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 764.213455] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "refresh_cache-738d52c6-0368-434f-a14f-05b47ca865e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.213594] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired lock "refresh_cache-738d52c6-0368-434f-a14f-05b47ca865e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.213746] env[70020]: DEBUG nova.network.neutron [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.527333] env[70020]: DEBUG nova.compute.utils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Using /dev/sd instead 
of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 764.532757] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 764.532757] env[70020]: DEBUG nova.network.neutron [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.567000] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2846aa56-4eef-4cfb-be8c-05126d618886 tempest-ServersV294TestFqdnHostnames-1836307938 tempest-ServersV294TestFqdnHostnames-1836307938-project-member] Lock "3501a6fc-f090-4098-8f63-57a97bd61f1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 60.140s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.604217] env[70020]: DEBUG nova.policy [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4725235f14474e03aa74fe2a76cbf329', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ec890d4297a40ba9998728c53680046', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 764.769953] env[70020]: DEBUG nova.network.neutron [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.006690] env[70020]: DEBUG nova.network.neutron [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Updating instance_info_cache with network_info: [{"id": "c2d9bed7-29b7-41c5-9e01-b47d54359ea0", "address": "fa:16:3e:ad:ea:77", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d9bed7-29", "ovs_interfaceid": "c2d9bed7-29b7-41c5-9e01-b47d54359ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.034988] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.039373] env[70020]: DEBUG nova.network.neutron [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [{"id": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "address": "fa:16:3e:99:62:fe", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbd6812-93", "ovs_interfaceid": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.098302] env[70020]: DEBUG nova.network.neutron [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Successfully created port: c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.102425] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5866a6f-5af0-4f27-9233-e99f579fe120 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.110412] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3839c9d-434a-4b82-b102-f5196fcf7f64 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.150040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0413d127-3def-4564-9a88-1ed274d1e55f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.155971] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3d2030-5b7c-4fdd-a5d7-124dea5cec0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.173234] env[70020]: DEBUG nova.compute.provider_tree [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.508988] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Releasing lock "refresh_cache-738d52c6-0368-434f-a14f-05b47ca865e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.509377] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Instance network_info: |[{"id": "c2d9bed7-29b7-41c5-9e01-b47d54359ea0", "address": "fa:16:3e:ad:ea:77", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d9bed7-29", "ovs_interfaceid": "c2d9bed7-29b7-41c5-9e01-b47d54359ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 765.509874] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:ea:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2d9bed7-29b7-41c5-9e01-b47d54359ea0', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.517693] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.517930] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 765.518178] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08941b5d-147a-4061-8908-f27f649074eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.541190] env[70020]: INFO nova.virt.block_device [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Booting with volume b709f316-53b7-4e6a-a871-7ecc3270770e at /dev/sda [ 765.546139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.548599] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.548599] env[70020]: value = "task-3618071" [ 765.548599] env[70020]: _type = "Task" [ 765.548599] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.561939] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618071, 'name': CreateVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.610468] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-169e8bb1-7ee5-46f5-a73c-f5cfe1885028 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.621631] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bece07e-4039-4e5d-af11-60a76a032ca9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.657734] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d95f42f-1007-48da-a7ef-e1a2c9e6d2de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.660799] env[70020]: DEBUG nova.compute.manager [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Received event network-changed-c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.661080] env[70020]: DEBUG nova.compute.manager [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Refreshing instance network info cache due to event network-changed-c2d9bed7-29b7-41c5-9e01-b47d54359ea0. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 765.661435] env[70020]: DEBUG oslo_concurrency.lockutils [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] Acquiring lock "refresh_cache-738d52c6-0368-434f-a14f-05b47ca865e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.661631] env[70020]: DEBUG oslo_concurrency.lockutils [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] Acquired lock "refresh_cache-738d52c6-0368-434f-a14f-05b47ca865e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.661840] env[70020]: DEBUG nova.network.neutron [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Refreshing network info cache for port c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.671844] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c322c855-ecab-41e3-bb40-136876f3de33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.684108] env[70020]: DEBUG nova.scheduler.client.report [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.710970] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de68b50-7eff-4719-b962-414d90d975be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.720628] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcda29e9-880c-40c6-aaa3-68218a6b28bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.733604] env[70020]: DEBUG nova.virt.block_device [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updating existing volume attachment record: e6aed752-f439-4134-8042-e6ab2bb7848f {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 765.827957] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquiring lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.828334] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.828644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquiring lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.828949] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.829222] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.832543] env[70020]: INFO nova.compute.manager [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Terminating instance [ 766.061425] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618071, 'name': CreateVM_Task, 'duration_secs': 0.308579} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.061762] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 766.062715] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.062937] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.063851] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 766.065242] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b3ff10e-5651-4331-aaae-6a307417f414 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.071038] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 766.071038] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525d34af-e8fc-0bcb-687f-c9e91e5cab83" [ 766.071038] env[70020]: _type = "Task" [ 766.071038] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.079880] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525d34af-e8fc-0bcb-687f-c9e91e5cab83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.338350] env[70020]: DEBUG nova.compute.manager [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 766.338599] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 766.339193] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5877f09-ade8-466c-aadf-93171ffd8d26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.346467] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 766.346467] env[70020]: value = "task-3618072" [ 766.346467] env[70020]: _type = "Task" [ 766.346467] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.354575] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.558627] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.558970] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65652678-1c3b-470a-8911-ca97ce1de772 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.568583] env[70020]: DEBUG oslo_vmware.api [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 766.568583] env[70020]: value = "task-3618073" [ 766.568583] env[70020]: _type = "Task" [ 766.568583] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.583317] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525d34af-e8fc-0bcb-687f-c9e91e5cab83, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.586721] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.586983] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.587268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.587444] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.587635] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.587903] env[70020]: DEBUG oslo_vmware.api [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618073, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.588126] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6046494c-882b-40db-8bdb-b42d06529f12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.598922] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.599160] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 766.600407] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03ae7f30-c3f3-4cca-a332-c226c69195a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.606577] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 766.606577] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521ecae1-617b-2cb0-ae65-4d37a954bbda" [ 766.606577] env[70020]: _type = "Task" [ 766.606577] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.616259] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521ecae1-617b-2cb0-ae65-4d37a954bbda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.696862] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.673s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.701046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 55.694s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.702883] env[70020]: INFO nova.compute.claims [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.737056] env[70020]: DEBUG nova.network.neutron [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Updated VIF entry in instance network info cache for port c2d9bed7-29b7-41c5-9e01-b47d54359ea0. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.737472] env[70020]: DEBUG nova.network.neutron [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Updating instance_info_cache with network_info: [{"id": "c2d9bed7-29b7-41c5-9e01-b47d54359ea0", "address": "fa:16:3e:ad:ea:77", "network": {"id": "023ef52b-8a34-4f0e-bc94-cf1fa8c3fb77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1722246036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f81ac3e65f9042f4bcf818cd216a32eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d9bed7-29", "ovs_interfaceid": "c2d9bed7-29b7-41c5-9e01-b47d54359ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.858446] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618072, 'name': PowerOffVM_Task, 'duration_secs': 0.375688} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.858803] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 766.859015] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 766.859275] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721586', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'name': 'volume-1e88bef3-79b2-4977-918f-5c3ee9732e62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'serial': '1e88bef3-79b2-4977-918f-5c3ee9732e62'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 766.860097] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f59c2b2-4b05-4a9b-af14-f501e30026e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.880986] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2895a1f9-c1ea-45e3-99f0-107fd650720b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.891248] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f27755-1ac2-4da9-ab5f-748447b1f388 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.911968] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42195f95-ff26-4f34-b5da-5f80164538f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.929667] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] The volume has not been displaced from its original location: [datastore1] volume-1e88bef3-79b2-4977-918f-5c3ee9732e62/volume-1e88bef3-79b2-4977-918f-5c3ee9732e62.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 766.935848] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Reconfiguring VM instance instance-00000021 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 766.936312] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a95bccf-d326-4a66-9cdb-f41b4030bae9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.954752] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 766.954752] env[70020]: value = "task-3618074" [ 766.954752] env[70020]: _type = "Task" [ 766.954752] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.962741] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618074, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.060463] env[70020]: DEBUG nova.network.neutron [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Successfully updated port: c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.079784] env[70020]: DEBUG oslo_vmware.api [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618073, 'name': PowerOnVM_Task, 'duration_secs': 0.398446} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.079784] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.079784] env[70020]: DEBUG nova.compute.manager [None req-10e963ab-1add-4614-bd7d-51aa68b853e8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.081075] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84eef6f-dba9-4a1e-a23b-2765d0c19dfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.118367] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521ecae1-617b-2cb0-ae65-4d37a954bbda, 'name': SearchDatastore_Task, 'duration_secs': 0.009899} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.120183] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61a28bbf-6971-4f73-ab86-4f863c2d7891 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.125552] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 767.125552] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5293c7f6-b7a5-d22d-7071-f6a1598351dc" [ 767.125552] env[70020]: _type = "Task" [ 767.125552] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.134533] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5293c7f6-b7a5-d22d-7071-f6a1598351dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.240586] env[70020]: DEBUG oslo_concurrency.lockutils [req-363ee450-94d8-45ec-b784-53eebe457e53 req-b47f8091-701a-4aa8-bb57-f09e0290f4bf service nova] Releasing lock "refresh_cache-738d52c6-0368-434f-a14f-05b47ca865e3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.271870] env[70020]: INFO nova.scheduler.client.report [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleted allocation for migration 2d171af4-44c5-498a-a2f3-345479067b8a [ 767.465204] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618074, 'name': ReconfigVM_Task, 'duration_secs': 0.261815} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.465489] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Reconfigured VM instance instance-00000021 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 767.470252] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bbaf993-f29f-4cbf-8861-371021f14b88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.485402] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 767.485402] env[70020]: value = "task-3618075" [ 767.485402] env[70020]: _type = "Task" [ 767.485402] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.493590] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618075, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.563272] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.563549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquired lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.563585] env[70020]: DEBUG nova.network.neutron [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.637662] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5293c7f6-b7a5-d22d-7071-f6a1598351dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009296} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.638065] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.638432] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 738d52c6-0368-434f-a14f-05b47ca865e3/738d52c6-0368-434f-a14f-05b47ca865e3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 767.638959] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c40e54e-0ce7-4977-9044-62c2b1d2c480 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.647129] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 767.647129] env[70020]: value = "task-3618076" [ 767.647129] env[70020]: _type = "Task" [ 767.647129] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.662020] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.778717] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bb5f35a-f742-49a2-a564-e2dc69aa44de tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 62.179s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.825617] env[70020]: DEBUG nova.compute.manager [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Received event network-vif-plugged-c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.825835] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] Acquiring lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.826818] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.826907] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.827164] env[70020]: DEBUG nova.compute.manager [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] No waiting events found dispatching network-vif-plugged-c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 767.827355] env[70020]: WARNING nova.compute.manager [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Received unexpected event network-vif-plugged-c6bd89ff-30f7-46ff-b392-16a88577740a for instance with vm_state building and task_state spawning. 
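The repeated "Waiting for the task ... progress is N% ... completed successfully" records above are oslo.vmware's task polling (wait_for_task/_poll_task). The sketch below is illustrative only, using the Python standard library; get_task_info(), TaskFailed and the 0.5s interval are assumptions for the example, not the actual oslo.vmware API.

    # Illustrative sketch: a generic "wait for task" polling loop of the kind
    # the wait_for_task/_poll_task entries above describe.
    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it succeeds or fails.

        get_task_info is a callable returning a dict such as
        {'state': 'running', 'progress': 42}, {'state': 'success'} or
        {'state': 'error', 'error': '...'}.
        """
        while True:
            info = get_task_info()
            state = info.get('state')
            if state == 'success':
                return info
            if state == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Still queued or running; report progress and try again, like
            # the "progress is N%" entries above.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)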
[ 767.827602] env[70020]: DEBUG nova.compute.manager [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Received event network-changed-c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.827690] env[70020]: DEBUG nova.compute.manager [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Refreshing instance network info cache due to event network-changed-c6bd89ff-30f7-46ff-b392-16a88577740a. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 767.827908] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] Acquiring lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.839906] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.840496] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.840787] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.840973] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.841238] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.841613] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 767.841787] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.842077] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.842286] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.842553] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.842690] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.842905] env[70020]: DEBUG nova.virt.hardware [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.844797] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9027e4-6114-4607-b273-9d0462c4616f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.856956] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ac1cc4-1e67-47a6-8a96-3aafa0376880 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.996700] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618075, 'name': ReconfigVM_Task, 'duration_secs': 0.242813} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.996700] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721586', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'name': 'volume-1e88bef3-79b2-4977-918f-5c3ee9732e62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '1e88bef3-79b2-4977-918f-5c3ee9732e62', 'serial': '1e88bef3-79b2-4977-918f-5c3ee9732e62'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 767.996700] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.997126] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd395af-1a7f-4d34-ab77-ff45aeb454f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.007384] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.007652] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8b1af63-8853-431e-9d80-dc24f24d045f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.122074] env[70020]: DEBUG nova.network.neutron [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.159233] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618076, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.305764] env[70020]: DEBUG nova.network.neutron [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updating instance_info_cache with network_info: [{"id": "c6bd89ff-30f7-46ff-b392-16a88577740a", "address": "fa:16:3e:43:f7:71", "network": {"id": "56f227bb-31f0-4ffa-af00-d53134c6fb95", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-290114732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ec890d4297a40ba9998728c53680046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bd89ff-30", "ovs_interfaceid": "c6bd89ff-30f7-46ff-b392-16a88577740a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.350933] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a723444-90de-417e-8b1e-d2c4c7ec265e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.360781] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ec2055-cd68-4930-a922-6bc9df94f6c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.404578] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31da1ae0-d920-484a-9fb7-ec089dea18f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.416323] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393c3a81-657f-4379-b48c-021341ce7e28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.432786] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.658313] env[70020]: DEBUG oslo_vmware.api 
[None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618076, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520335} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.658660] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 738d52c6-0368-434f-a14f-05b47ca865e3/738d52c6-0368-434f-a14f-05b47ca865e3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.658851] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.659131] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8340bb29-1b42-4152-b167-01c11fab62df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.666680] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 768.666680] env[70020]: value = "task-3618078" [ 768.666680] env[70020]: _type = "Task" [ 768.666680] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.676322] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618078, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.810673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Releasing lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.811050] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance network_info: |[{"id": "c6bd89ff-30f7-46ff-b392-16a88577740a", "address": "fa:16:3e:43:f7:71", "network": {"id": "56f227bb-31f0-4ffa-af00-d53134c6fb95", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-290114732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ec890d4297a40ba9998728c53680046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bd89ff-30", "ovs_interfaceid": "c6bd89ff-30f7-46ff-b392-16a88577740a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 768.811383] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] Acquired lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.811576] env[70020]: DEBUG nova.network.neutron [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Refreshing network info cache for port c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 768.812818] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:f7:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6bd89ff-30f7-46ff-b392-16a88577740a', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 768.820493] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 
tempest-ServerActionsV293TestJSON-1916911121-project-member] Creating folder: Project (4ec890d4297a40ba9998728c53680046). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 768.820981] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0dbc468-28d2-496c-8ec9-ae761787b82e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.833822] env[70020]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 768.834046] env[70020]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=70020) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 768.834408] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Folder already exists: Project (4ec890d4297a40ba9998728c53680046). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 768.834599] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Creating folder: Instances. Parent ref: group-v721587. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 768.835194] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e3bdc4e-cdd7-493e-a0fc-5be604eeceee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.844591] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Created folder: Instances in parent group-v721587. [ 768.844819] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.845074] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 768.845213] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86bf1c3d-067a-4c33-87ca-565a3ce4a21f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.867026] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 768.867026] env[70020]: value = "task-3618081" [ 768.867026] env[70020]: _type = "Task" [ 768.867026] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.873340] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.957827] env[70020]: ERROR nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [req-21134c42-b4b0-4da8-856e-824254b0605b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21134c42-b4b0-4da8-856e-824254b0605b"}]} [ 768.975105] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 768.990347] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 768.990647] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.002926] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 769.020673] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 
tempest-ServerDiagnosticsTest-382342043-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 769.185445] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.178006} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.185445] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.185445] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0708323-71b6-4b60-9d0a-a3633f57398e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.215860] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 738d52c6-0368-434f-a14f-05b47ca865e3/738d52c6-0368-434f-a14f-05b47ca865e3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.219048] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc78208b-cb34-4492-a864-722e596e60b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.240651] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 769.240651] env[70020]: value = "task-3618082" [ 769.240651] env[70020]: _type = "Task" [ 769.240651] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.252490] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618082, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.374655] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.589325] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceffd43-da33-4d97-b5f1-df09e0db6957 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.596809] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ac9bd0-71f5-4eea-aa3d-86783b1fc0f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.628625] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd78a5d-b7ca-48ef-8367-2b1918136a07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.637662] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9e646b-7e05-46ea-aa23-4a794eb9b100 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.654755] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.694593] env[70020]: DEBUG nova.network.neutron [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updated VIF entry in instance network info cache for port c6bd89ff-30f7-46ff-b392-16a88577740a. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 769.695051] env[70020]: DEBUG nova.network.neutron [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updating instance_info_cache with network_info: [{"id": "c6bd89ff-30f7-46ff-b392-16a88577740a", "address": "fa:16:3e:43:f7:71", "network": {"id": "56f227bb-31f0-4ffa-af00-d53134c6fb95", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-290114732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ec890d4297a40ba9998728c53680046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bd89ff-30", "ovs_interfaceid": "c6bd89ff-30f7-46ff-b392-16a88577740a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.751687] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618082, 'name': ReconfigVM_Task, 'duration_secs': 0.285232} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.752650] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 738d52c6-0368-434f-a14f-05b47ca865e3/738d52c6-0368-434f-a14f-05b47ca865e3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.752650] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1775e4fc-9852-4ebc-8ff4-44e66a7ad907 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.758074] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 769.758074] env[70020]: value = "task-3618083" [ 769.758074] env[70020]: _type = "Task" [ 769.758074] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.766427] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618083, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.875875] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.966376] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "b53f55c1-1867-410c-9c53-f552ff30d697" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.966613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.178168] env[70020]: ERROR nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [req-eaef53f5-fd0d-4181-9618-b9c29ec04972] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eaef53f5-fd0d-4181-9618-b9c29ec04972"}]} [ 770.196118] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 770.198363] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a81368f-a849-4166-8b93-52b01a93f549 req-7c1798e7-1350-400a-a68c-78ca32bb8cc9 service nova] Releasing lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.211831] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 770.212049] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 770.222979] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 770.242913] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 770.274505] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 
tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618083, 'name': Rename_Task, 'duration_secs': 0.156697} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.274771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.275031] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eac69c42-9dfa-48eb-8a3b-72ae2600b266 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.283077] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 770.283077] env[70020]: value = "task-3618084" [ 770.283077] env[70020]: _type = "Task" [ 770.283077] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.298145] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.383678] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.794163] env[70020]: DEBUG oslo_vmware.api [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618084, 'name': PowerOnVM_Task, 'duration_secs': 0.447196} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.795310] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.795511] env[70020]: INFO nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Took 7.35 seconds to spawn the instance on the hypervisor. 
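The 409 "placement.concurrent_update" responses above show the scheduler report client hitting a stale resource provider generation, refreshing the provider's inventories, aggregates and traits, and retrying until the update lands (generation 76 to 77). Below is a minimal sketch of that retry pattern under stated assumptions: get_provider_generation() and put_inventory() stand in for the real Placement API calls and are hypothetical helpers, not Nova's report client.

    # Illustrative sketch of the retry-on-generation-conflict pattern shown above.
    def set_inventory_with_retry(provider_uuid, inventory,
                                 get_provider_generation, put_inventory,
                                 max_attempts=4):
        """Send inventory to Placement, refreshing the generation on 409s."""
        for attempt in range(max_attempts):
            # Re-read the provider's current generation before each attempt.
            generation = get_provider_generation(provider_uuid)
            status = put_inventory(provider_uuid, generation, inventory)
            if status == 200:
                return True   # update landed; Placement bumps the generation
            if status == 409:
                # Another writer bumped the generation first (the
                # "placement.concurrent_update" conflicts above); refresh
                # and retry.
                continue
            raise RuntimeError("unexpected Placement response: %s" % status)
        return False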
[ 770.795874] env[70020]: DEBUG nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.796807] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78a087a-9974-481b-a93d-d668c0e1bf00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.834788] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe8b52c-e471-4ced-b016-46f93fa996c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.842318] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f8ae9f-a84a-49a9-a77f-a4d51c5c3bde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.878580] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfde4591-6146-48f3-b765-c75fbe4e8524 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.886858] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.889890] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99181d2-597e-47ab-8346-7d102f068516 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.903457] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.321024] env[70020]: INFO nova.compute.manager [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Took 63.88 seconds to build instance. [ 771.383629] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.437917] env[70020]: DEBUG nova.scheduler.client.report [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 771.438228] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 76 to 77 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 771.438411] env[70020]: DEBUG nova.compute.provider_tree [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.694240] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 771.694637] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 771.695017] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Deleting the datastore file [datastore1] b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.695998] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d4b5da3-35b7-48c9-9fd6-15051039f962 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.704187] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd 
tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for the task: (returnval){ [ 771.704187] env[70020]: value = "task-3618085" [ 771.704187] env[70020]: _type = "Task" [ 771.704187] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.712977] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.821629] env[70020]: DEBUG oslo_concurrency.lockutils [None req-89d25183-151e-470b-8441-0ba885579fb3 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "738d52c6-0368-434f-a14f-05b47ca865e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.289s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.883847] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.943764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.243s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.944382] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.947161] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 56.122s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.947396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.949579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 53.217s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.952047] env[70020]: INFO nova.compute.claims [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.976386] env[70020]: INFO nova.scheduler.client.report [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Deleted allocations for instance a09db142-60d1-4a62-8e76-1e2e3676124f [ 772.214843] env[70020]: DEBUG oslo_vmware.api [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Task: {'id': task-3618085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099796} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.215129] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.215306] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.215505] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.215706] env[70020]: INFO nova.compute.manager [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Took 5.88 seconds to destroy the instance on the hypervisor. [ 772.215947] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.216300] env[70020]: DEBUG nova.compute.manager [-] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 772.216300] env[70020]: DEBUG nova.network.neutron [-] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.232172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.232402] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.264941] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "ef0d716a-080e-4167-bd34-b2c660b95c88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.265201] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.297857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.298477] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.328963] env[70020]: DEBUG nova.compute.manager [None 
req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 772.388209] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618081, 'name': CreateVM_Task, 'duration_secs': 3.033732} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.388376] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.389045] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'e6aed752-f439-4134-8042-e6ab2bb7848f', 'guest_format': None, 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721598', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'name': 'volume-b709f316-53b7-4e6a-a871-7ecc3270770e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6f2bc97b-0f0a-4f16-b41c-7af96130783f', 'attached_at': '', 'detached_at': '', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'serial': 'b709f316-53b7-4e6a-a871-7ecc3270770e'}, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=70020) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 772.389260] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Root volume attach. 
Driver type: vmdk {{(pid=70020) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 772.390056] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789d81fd-170b-4ea5-b264-316c36b928c1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.397755] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb63416-54c5-4382-981a-d4b2b636349f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.403106] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebeffd2-6922-4529-8d14-e35aec1b3887 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.408514] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-1d8b136a-8c5d-4d62-b792-2aa65ada2cb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.415584] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 772.415584] env[70020]: value = "task-3618086" [ 772.415584] env[70020]: _type = "Task" [ 772.415584] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.422859] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618086, 'name': RelocateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.456374] env[70020]: DEBUG nova.compute.utils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 772.462435] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.462700] env[70020]: DEBUG nova.network.neutron [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.486482] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d435bc63-51a9-4b0b-b924-7eaf125c46a0 tempest-ServersAaction247Test-189838450 tempest-ServersAaction247Test-189838450-project-member] Lock "a09db142-60d1-4a62-8e76-1e2e3676124f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 60.918s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.545726] env[70020]: DEBUG nova.policy [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6be7299f4c4b56a4f8f6143456c631', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e518d4dd4994927bc416bc862170563', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 772.867838] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.918607] env[70020]: DEBUG nova.compute.manager [req-a84d4100-aa25-46b6-8638-6405ef441734 req-5b758986-6af2-43b8-a187-5e6645438692 service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Received event network-vif-deleted-2944f964-96c9-42c8-8914-3737e1a4349a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.919573] env[70020]: INFO nova.compute.manager [req-a84d4100-aa25-46b6-8638-6405ef441734 req-5b758986-6af2-43b8-a187-5e6645438692 service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Neutron deleted interface 2944f964-96c9-42c8-8914-3737e1a4349a; detaching it from the instance and deleting it from the info cache [ 772.919912] env[70020]: DEBUG nova.network.neutron [req-a84d4100-aa25-46b6-8638-6405ef441734 req-5b758986-6af2-43b8-a187-5e6645438692 service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.937741] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618086, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.966810] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 773.087944] env[70020]: DEBUG nova.network.neutron [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Successfully created port: 8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.363837] env[70020]: DEBUG nova.network.neutron [-] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.432364] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618086, 'name': RelocateVM_Task, 'duration_secs': 0.576082} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.432364] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4f6e265-c923-4529-8142-2b34ecc13633 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.437784] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 773.437902] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721598', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'name': 'volume-b709f316-53b7-4e6a-a871-7ecc3270770e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6f2bc97b-0f0a-4f16-b41c-7af96130783f', 'attached_at': '', 'detached_at': '', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'serial': 'b709f316-53b7-4e6a-a871-7ecc3270770e'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 773.438996] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9077209-2f67-4b55-a418-eb299caee4e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.461874] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dd4946-6001-40c6-9d33-b59e56bf9c6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.468554] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45aa023-cfa3-4a13-a55d-96979f42c648 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.513696] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] volume-b709f316-53b7-4e6a-a871-7ecc3270770e/volume-b709f316-53b7-4e6a-a871-7ecc3270770e.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.538722] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d18116a-160c-432e-9906-9aae2c53a97e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.563987] env[70020]: DEBUG nova.compute.manager [req-a84d4100-aa25-46b6-8638-6405ef441734 req-5b758986-6af2-43b8-a187-5e6645438692 service nova] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Detach interface failed, port_id=2944f964-96c9-42c8-8914-3737e1a4349a, reason: Instance b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 773.571211] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 773.571211] env[70020]: value = "task-3618087" [ 773.571211] env[70020]: _type = "Task" [ 773.571211] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.580519] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618087, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.712971] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725e58ad-35a8-44e2-929d-9040c5028255 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.721289] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f27111-b9cb-4a87-8221-7f0464da51e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.752821] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572d1d54-b7f3-4815-9199-f99fa9fcc506 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.760548] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad305cda-6d4d-43ca-9364-f95dd00b0746 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.775095] env[70020]: DEBUG nova.compute.provider_tree [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.865152] env[70020]: INFO nova.compute.manager [-] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Took 1.65 seconds to deallocate network for instance. [ 774.039457] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 774.070292] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 774.070673] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.070673] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.070873] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.071099] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.071153] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 774.071344] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 774.071601] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 774.072276] env[70020]: DEBUG nova.virt.hardware [None 
req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 774.072276] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 774.072276] env[70020]: DEBUG nova.virt.hardware [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 774.073017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222a05ae-feb9-4225-84b9-06234e916c1e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.088755] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618087, 'name': ReconfigVM_Task, 'duration_secs': 0.268264} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.089098] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Reconfigured VM instance instance-00000027 to attach disk [datastore1] volume-b709f316-53b7-4e6a-a871-7ecc3270770e/volume-b709f316-53b7-4e6a-a871-7ecc3270770e.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.094989] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68124284-2102-4098-ad38-3fb1330a9d44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.099896] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8406d0af-c064-4ef4-b8a8-25d0c0b471d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.122450] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 774.122450] env[70020]: value = "task-3618088" [ 774.122450] env[70020]: _type = "Task" [ 774.122450] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.131108] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618088, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.144445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "738d52c6-0368-434f-a14f-05b47ca865e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.144445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "738d52c6-0368-434f-a14f-05b47ca865e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.144445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "738d52c6-0368-434f-a14f-05b47ca865e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.144601] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "738d52c6-0368-434f-a14f-05b47ca865e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.144706] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "738d52c6-0368-434f-a14f-05b47ca865e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.147052] env[70020]: INFO nova.compute.manager [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Terminating instance [ 774.278641] env[70020]: DEBUG nova.scheduler.client.report [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.419034] env[70020]: INFO nova.compute.manager [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd 
tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Took 0.55 seconds to detach 1 volumes for instance. [ 774.422723] env[70020]: DEBUG nova.compute.manager [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Deleting volume: 1e88bef3-79b2-4977-918f-5c3ee9732e62 {{(pid=70020) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 774.634190] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618088, 'name': ReconfigVM_Task, 'duration_secs': 0.136823} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.634539] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721598', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'name': 'volume-b709f316-53b7-4e6a-a871-7ecc3270770e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6f2bc97b-0f0a-4f16-b41c-7af96130783f', 'attached_at': '', 'detached_at': '', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'serial': 'b709f316-53b7-4e6a-a871-7ecc3270770e'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 774.635231] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3b78c4d-7582-4c9a-8eae-e7211ec6a438 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.640862] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 774.640862] env[70020]: value = "task-3618090" [ 774.640862] env[70020]: _type = "Task" [ 774.640862] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.650425] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618090, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.652078] env[70020]: DEBUG nova.compute.manager [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 774.652483] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.653492] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb8977b-ad0c-40e0-8f95-44e31c12d3ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.660652] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 774.660950] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-498f978f-7163-4695-a1c5-196ba9cbfe83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.664990] env[70020]: DEBUG nova.network.neutron [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Successfully updated port: 8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.667989] env[70020]: DEBUG oslo_vmware.api [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 774.667989] env[70020]: value = "task-3618091" [ 774.667989] env[70020]: _type = "Task" [ 774.667989] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.681136] env[70020]: DEBUG oslo_vmware.api [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.783931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.834s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.784490] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.787031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 52.679s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.788814] env[70020]: INFO nova.compute.claims [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.802017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.802017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.802017] env[70020]: DEBUG nova.compute.manager [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 774.802017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10da181-dec6-43ce-8ed2-d8981b3617d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.808277] env[70020]: DEBUG nova.compute.manager [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 774.809505] env[70020]: DEBUG nova.objects.instance [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'flavor' on Instance uuid 8adadb2e-2a20-45b1-bed8-34e09df25f39 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.945763] env[70020]: DEBUG nova.compute.manager [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Received event network-vif-plugged-8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.946844] env[70020]: DEBUG oslo_concurrency.lockutils 
[req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] Acquiring lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.946844] env[70020]: DEBUG oslo_concurrency.lockutils [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.946844] env[70020]: DEBUG oslo_concurrency.lockutils [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.946844] env[70020]: DEBUG nova.compute.manager [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] No waiting events found dispatching network-vif-plugged-8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.946844] env[70020]: WARNING nova.compute.manager [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Received unexpected event network-vif-plugged-8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e for instance with vm_state building and task_state spawning. [ 774.947339] env[70020]: DEBUG nova.compute.manager [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Received event network-changed-8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.947339] env[70020]: DEBUG nova.compute.manager [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Refreshing instance network info cache due to event network-changed-8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 774.947339] env[70020]: DEBUG oslo_concurrency.lockutils [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] Acquiring lock "refresh_cache-ae91adc5-b3a4-4503-91f2-d803eaefedc5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.947339] env[70020]: DEBUG oslo_concurrency.lockutils [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] Acquired lock "refresh_cache-ae91adc5-b3a4-4503-91f2-d803eaefedc5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.947581] env[70020]: DEBUG nova.network.neutron [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Refreshing network info cache for port 8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.977094] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.151655] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618090, 'name': Rename_Task, 'duration_secs': 0.130935} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.151911] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.152171] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac70de70-c97c-4119-a9a4-b6b9850b59e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.158337] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 775.158337] env[70020]: value = "task-3618092" [ 775.158337] env[70020]: _type = "Task" [ 775.158337] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.166008] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618092, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.169709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "refresh_cache-ae91adc5-b3a4-4503-91f2-d803eaefedc5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.177773] env[70020]: DEBUG oslo_vmware.api [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618091, 'name': PowerOffVM_Task, 'duration_secs': 0.222046} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.178078] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 775.178281] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.178532] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f6afd8c-5fd5-4ca5-9173-eefdf5956da7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.249468] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.250123] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.250123] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Deleting the datastore file [datastore1] 738d52c6-0368-434f-a14f-05b47ca865e3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.250258] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a8295c-f3d4-46b2-b4db-b326f40f057c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.257322] env[70020]: DEBUG oslo_vmware.api [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 775.257322] env[70020]: value = "task-3618094" [ 775.257322] 
env[70020]: _type = "Task" [ 775.257322] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.265468] env[70020]: DEBUG oslo_vmware.api [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.293584] env[70020]: DEBUG nova.compute.utils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 775.296805] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 775.297599] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.358747] env[70020]: DEBUG nova.policy [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cee4ead0b96e49f2a95a3d74ce424942', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5ae2c1c42704f49854f86cca4f8a95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.497281] env[70020]: DEBUG nova.network.neutron [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.584608] env[70020]: DEBUG nova.network.neutron [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.673164] env[70020]: DEBUG oslo_vmware.api [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618092, 'name': PowerOnVM_Task, 'duration_secs': 0.506838} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.673536] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.673787] env[70020]: INFO nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Took 7.83 seconds to spawn the instance on the hypervisor. [ 775.673985] env[70020]: DEBUG nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 775.675142] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e74f6d-f707-4b2a-b57d-546c57c8eb4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.729125] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Successfully created port: 58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.768759] env[70020]: DEBUG oslo_vmware.api [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161895} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.770757] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.770757] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 775.770757] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.770757] env[70020]: INFO nova.compute.manager [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 775.770757] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.771796] env[70020]: DEBUG nova.compute.manager [-] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 775.771796] env[70020]: DEBUG nova.network.neutron [-] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.799839] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 775.821050] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.821050] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d232101-a3a9-4de7-af1e-37b6b6067f7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.836027] env[70020]: DEBUG oslo_vmware.api [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 775.836027] env[70020]: value = "task-3618095" [ 775.836027] env[70020]: _type = "Task" [ 775.836027] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.851509] env[70020]: DEBUG oslo_vmware.api [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.087596] env[70020]: DEBUG oslo_concurrency.lockutils [req-1218e2d3-18cb-45fb-a6e3-861b02765d50 req-be7dbd9a-8e58-436f-9f58-d7d1e8125996 service nova] Releasing lock "refresh_cache-ae91adc5-b3a4-4503-91f2-d803eaefedc5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.088205] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquired lock "refresh_cache-ae91adc5-b3a4-4503-91f2-d803eaefedc5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.088381] env[70020]: DEBUG nova.network.neutron [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.148803] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Successfully created port: ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.195787] env[70020]: INFO nova.compute.manager [None req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Took 68.24 seconds to build instance. 
[ 776.354750] env[70020]: DEBUG oslo_vmware.api [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618095, 'name': PowerOffVM_Task, 'duration_secs': 0.272481} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.354750] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 776.355021] env[70020]: DEBUG nova.compute.manager [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.356470] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aedc033-54b5-44ca-9701-b4df23308e69 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.439431] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc636a9d-f0cd-4aba-b847-96532a59315a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.447372] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d34427-c3e8-4c28-8fbf-e269fdda9a7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.478976] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Successfully created port: 5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.481555] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63846bbe-0e10-479f-a400-323946bbf65e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.489513] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba2d070-7727-45ff-9769-11bb94dc8eea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.502947] env[70020]: DEBUG nova.compute.provider_tree [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.696294] env[70020]: DEBUG nova.network.neutron [-] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.697819] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-de52e49b-103f-4164-8383-0ba10f8c18c6 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.143s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.813379] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.841764] env[70020]: DEBUG nova.network.neutron [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.847351] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.847477] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.847679] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.847812] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.847926] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 776.849655] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca 
tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.849655] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.849655] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.849655] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.849655] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.849968] env[70020]: DEBUG nova.virt.hardware [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.850684] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a287a5a2-efd6-414c-92b9-027a8cc9f15b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.862797] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713b0cb8-6202-4677-a6d9-90d65165bc85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.879649] env[70020]: DEBUG oslo_concurrency.lockutils [None req-38d0fa03-0686-45e0-954e-c620f6e41eb8 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.080s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.005972] env[70020]: DEBUG nova.scheduler.client.report [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.050847] env[70020]: DEBUG nova.network.neutron [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Updating instance_info_cache with network_info: [{"id": "8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e", "address": "fa:16:3e:a2:ed:61", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b3dcf6d-4b", "ovs_interfaceid": "8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.071217] env[70020]: DEBUG nova.compute.manager [req-8db6b9d0-3aa9-40db-8a96-45eaf35ae24f req-abb8743e-841a-4b70-86ba-a162f4938e04 service nova] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Received event network-vif-deleted-c2d9bed7-29b7-41c5-9e01-b47d54359ea0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.200381] env[70020]: INFO nova.compute.manager [-] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Took 1.43 seconds to deallocate network for instance. [ 777.201170] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.362079] env[70020]: DEBUG nova.objects.instance [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'flavor' on Instance uuid 8adadb2e-2a20-45b1-bed8-34e09df25f39 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.514709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.515120] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 777.517624] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 52.219s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.518378] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.520565] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.732s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.520753] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.526018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.940s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.526018] env[70020]: INFO nova.compute.claims [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 
tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.557501] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Releasing lock "refresh_cache-ae91adc5-b3a4-4503-91f2-d803eaefedc5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.557501] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Instance network_info: |[{"id": "8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e", "address": "fa:16:3e:a2:ed:61", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b3dcf6d-4b", "ovs_interfaceid": "8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 777.557829] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:ed:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.568658] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Creating folder: Project (4e518d4dd4994927bc416bc862170563). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.569142] env[70020]: INFO nova.scheduler.client.report [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Deleted allocations for instance 6c36df58-3ab3-4595-b89c-9ab5a4664eec [ 777.572937] env[70020]: INFO nova.scheduler.client.report [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Deleted allocations for instance 19036f6f-2ee3-4ea5-82fa-b510bf903922 [ 777.574274] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70fe3d00-e1eb-4b41-a673-e60ccd7b123b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.595968] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Created folder: Project (4e518d4dd4994927bc416bc862170563) in parent group-v721521. [ 777.596181] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Creating folder: Instances. Parent ref: group-v721650. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.596417] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f5e24eb-f23d-47c2-bc2a-480c86c0188a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.606283] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Created folder: Instances in parent group-v721650. [ 777.606512] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.606697] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.606901] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf8b061b-50df-4a40-9444-4eb9143ffbca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.628229] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.628229] env[70020]: value = "task-3618098" [ 777.628229] env[70020]: _type = "Task" [ 777.628229] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.638954] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618098, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.712988] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.737613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.870211] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.870211] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.870211] env[70020]: DEBUG nova.network.neutron [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.870211] env[70020]: DEBUG nova.objects.instance [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'info_cache' on Instance uuid 8adadb2e-2a20-45b1-bed8-34e09df25f39 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.029316] env[70020]: DEBUG nova.compute.utils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.036305] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.036305] env[70020]: DEBUG nova.network.neutron [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.092599] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5b28f93a-4499-4f5d-b2bc-316d52b56450 tempest-ImagesNegativeTestJSON-2089407727 tempest-ImagesNegativeTestJSON-2089407727-project-member] Lock "6c36df58-3ab3-4595-b89c-9ab5a4664eec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.306s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.094505] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c79be9a9-932f-4bf4-8968-96d29d04a08f tempest-AttachInterfacesV270Test-1942387806 tempest-AttachInterfacesV270Test-1942387806-project-member] Lock "19036f6f-2ee3-4ea5-82fa-b510bf903922" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.959s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.135548] env[70020]: DEBUG nova.policy [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71a1dc8214b042b28a551bfd8444e0c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2a0a96b236e4a7c8f6878d0becfc66b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 778.146024] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618098, 'name': CreateVM_Task, 'duration_secs': 0.349105} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.146024] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.146024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.146024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.146024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 778.146024] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41468410-eb5b-4262-84c9-fc6e372182fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.157290] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 778.157290] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529538bb-8f18-9f8d-7248-7af1fe37dfc1" [ 778.157290] env[70020]: _type = "Task" [ 778.157290] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.172822] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529538bb-8f18-9f8d-7248-7af1fe37dfc1, 'name': SearchDatastore_Task, 'duration_secs': 0.013768} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.173582] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.174255] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.174742] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.175129] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.175533] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.176034] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fa15e05-c263-42c5-8b6a-9589642dc05a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.188022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.188022] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.188022] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55609a85-d3e2-492d-975c-3f2fcc3b2da8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.194928] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 778.194928] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52663257-ba42-59b3-6068-2b2ef7242f84" [ 778.194928] env[70020]: _type = "Task" [ 778.194928] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.208077] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52663257-ba42-59b3-6068-2b2ef7242f84, 'name': SearchDatastore_Task, 'duration_secs': 0.008969} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.209357] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d469a75-ec6c-4d47-b57a-2260de2f7fcb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.217119] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 778.217119] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52057cd5-5644-3b42-6f17-782cd85c47d6" [ 778.217119] env[70020]: _type = "Task" [ 778.217119] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.225844] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52057cd5-5644-3b42-6f17-782cd85c47d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.240111] env[70020]: DEBUG nova.compute.manager [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Received event network-changed-c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 778.240111] env[70020]: DEBUG nova.compute.manager [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Refreshing instance network info cache due to event network-changed-c6bd89ff-30f7-46ff-b392-16a88577740a. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 778.242642] env[70020]: DEBUG oslo_concurrency.lockutils [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] Acquiring lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.242642] env[70020]: DEBUG oslo_concurrency.lockutils [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] Acquired lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.242642] env[70020]: DEBUG nova.network.neutron [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Refreshing network info cache for port c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.373213] env[70020]: DEBUG nova.objects.base [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Object Instance<8adadb2e-2a20-45b1-bed8-34e09df25f39> lazy-loaded attributes: flavor,info_cache {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 778.543820] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 778.731292] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52057cd5-5644-3b42-6f17-782cd85c47d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009804} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.731800] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.732121] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ae91adc5-b3a4-4503-91f2-d803eaefedc5/ae91adc5-b3a4-4503-91f2-d803eaefedc5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.732903] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14160d69-5150-47c3-8696-bcf2a04486b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.741354] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 778.741354] env[70020]: value = "task-3618099" [ 778.741354] env[70020]: _type = "Task" [ 778.741354] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.757496] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618099, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.871745] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Successfully updated port: 58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 779.081183] env[70020]: DEBUG nova.network.neutron [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Successfully created port: 2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.160609] env[70020]: DEBUG nova.network.neutron [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updated VIF entry in instance network info cache for port c6bd89ff-30f7-46ff-b392-16a88577740a. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.160984] env[70020]: DEBUG nova.network.neutron [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updating instance_info_cache with network_info: [{"id": "c6bd89ff-30f7-46ff-b392-16a88577740a", "address": "fa:16:3e:43:f7:71", "network": {"id": "56f227bb-31f0-4ffa-af00-d53134c6fb95", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-290114732-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ec890d4297a40ba9998728c53680046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bd89ff-30", "ovs_interfaceid": "c6bd89ff-30f7-46ff-b392-16a88577740a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.198198] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd455084-7fa5-49a5-8770-5e3d2761112b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.216295] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39a8c71-58ac-4997-876a-69111dd03d80 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.255879] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dda816-e641-46b5-a779-6bed8a20356c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.265193] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618099, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520747} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.269771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ae91adc5-b3a4-4503-91f2-d803eaefedc5/ae91adc5-b3a4-4503-91f2-d803eaefedc5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.269771] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.269771] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbad7506-944a-41c8-a54c-54cb4a5d266d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.271509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d7e4bb-04aa-41eb-adf0-21ec992c22fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.292220] env[70020]: DEBUG nova.compute.manager [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-vif-plugged-58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.292466] env[70020]: DEBUG oslo_concurrency.lockutils [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] Acquiring lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.292708] env[70020]: DEBUG oslo_concurrency.lockutils [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.292878] env[70020]: DEBUG oslo_concurrency.lockutils [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.293352] env[70020]: DEBUG nova.compute.manager [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] No waiting events found dispatching network-vif-plugged-58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.293590] env[70020]: WARNING nova.compute.manager 
[req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received unexpected event network-vif-plugged-58a7c4b8-c855-465c-8f72-5d93eccdbb81 for instance with vm_state building and task_state spawning. [ 779.293922] env[70020]: DEBUG nova.compute.manager [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-changed-58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.294167] env[70020]: DEBUG nova.compute.manager [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Refreshing instance network info cache due to event network-changed-58a7c4b8-c855-465c-8f72-5d93eccdbb81. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 779.294369] env[70020]: DEBUG oslo_concurrency.lockutils [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] Acquiring lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.294512] env[70020]: DEBUG oslo_concurrency.lockutils [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] Acquired lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.295010] env[70020]: DEBUG nova.network.neutron [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Refreshing network info cache for port 58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.296432] env[70020]: DEBUG nova.compute.provider_tree [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.299996] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 779.299996] env[70020]: value = "task-3618100" [ 779.299996] env[70020]: _type = "Task" [ 779.299996] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.309951] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618100, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.554400] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 779.563092] env[70020]: DEBUG nova.network.neutron [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.598031] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 779.598312] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.600564] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 779.600564] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 
tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.600564] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 779.600564] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 779.600564] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 779.601208] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 779.601208] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 779.601208] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 779.601208] env[70020]: DEBUG nova.virt.hardware [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 779.601208] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a21ddf-a283-452e-bd32-2d45acbf4e4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.610224] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc1fe6e-487b-4ee2-b49b-4f408f028e5f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.667842] env[70020]: DEBUG oslo_concurrency.lockutils [req-a815f732-2aa8-480c-86df-bd5db9fc0469 req-f0806495-5017-4076-aac2-576d176622c5 service nova] Releasing lock "refresh_cache-6f2bc97b-0f0a-4f16-b41c-7af96130783f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.801800] env[70020]: DEBUG 
nova.scheduler.client.report [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 779.813701] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618100, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06642} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.813996] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.814779] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd10155c-1fee-4843-a4ba-099bfb319784 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.836725] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] ae91adc5-b3a4-4503-91f2-d803eaefedc5/ae91adc5-b3a4-4503-91f2-d803eaefedc5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.837565] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45e26311-d43f-4f0c-aa8e-ed0aa7a7e1eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.857839] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 779.857839] env[70020]: value = "task-3618101" [ 779.857839] env[70020]: _type = "Task" [ 779.857839] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.865296] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618101, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.955008] env[70020]: DEBUG nova.network.neutron [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.068146] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.189862] env[70020]: DEBUG nova.network.neutron [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.309524] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.311016] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.312671] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.725s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.314444] env[70020]: INFO nova.compute.claims [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.373420] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618101, 'name': ReconfigVM_Task, 'duration_secs': 0.284529} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.373656] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Reconfigured VM instance instance-00000028 to attach disk [datastore2] ae91adc5-b3a4-4503-91f2-d803eaefedc5/ae91adc5-b3a4-4503-91f2-d803eaefedc5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.374347] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bc8367c-ec40-4593-9caf-4d9407181762 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.382274] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 780.382274] env[70020]: value = "task-3618102" [ 780.382274] env[70020]: _type = "Task" [ 780.382274] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.390426] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618102, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.693189] env[70020]: DEBUG oslo_concurrency.lockutils [req-598e1c63-098b-42d8-8e4e-13c2a37c4ce4 req-e1389d39-206f-4a0e-8964-b237fca13984 service nova] Releasing lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.819964] env[70020]: DEBUG nova.compute.utils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 780.826035] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 780.826035] env[70020]: DEBUG nova.network.neutron [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 780.878507] env[70020]: DEBUG nova.policy [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ebabdad8aa843f28165fcd167382c60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfa7d3b1f5a14c60b19cde5030c2f0a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 780.893399] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618102, 'name': Rename_Task, 'duration_secs': 0.145336} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.893667] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.893907] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b21e2575-7984-4a97-81b4-5d24463284e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.900971] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 780.900971] env[70020]: value = "task-3618103" [ 780.900971] env[70020]: _type = "Task" [ 780.900971] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.910704] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618103, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.076609] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.076958] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a82fa696-a9ea-48dd-92f1-41e07b91f665 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.084623] env[70020]: DEBUG oslo_vmware.api [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 781.084623] env[70020]: value = "task-3618104" [ 781.084623] env[70020]: _type = "Task" [ 781.084623] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.098049] env[70020]: DEBUG oslo_vmware.api [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.187510] env[70020]: DEBUG nova.compute.manager [req-66e9b1e1-97e3-4394-88c0-f1e69828080b req-459f0db7-343d-4225-b04d-17060c7bdf9e service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-vif-plugged-ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.187510] env[70020]: DEBUG oslo_concurrency.lockutils [req-66e9b1e1-97e3-4394-88c0-f1e69828080b req-459f0db7-343d-4225-b04d-17060c7bdf9e service nova] Acquiring lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.187510] env[70020]: DEBUG oslo_concurrency.lockutils [req-66e9b1e1-97e3-4394-88c0-f1e69828080b req-459f0db7-343d-4225-b04d-17060c7bdf9e service nova] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.187510] env[70020]: DEBUG oslo_concurrency.lockutils [req-66e9b1e1-97e3-4394-88c0-f1e69828080b req-459f0db7-343d-4225-b04d-17060c7bdf9e service nova] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.187510] env[70020]: DEBUG nova.compute.manager [req-66e9b1e1-97e3-4394-88c0-f1e69828080b req-459f0db7-343d-4225-b04d-17060c7bdf9e service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] No waiting events found dispatching network-vif-plugged-ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 781.187987] env[70020]: WARNING nova.compute.manager 
[req-66e9b1e1-97e3-4394-88c0-f1e69828080b req-459f0db7-343d-4225-b04d-17060c7bdf9e service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received unexpected event network-vif-plugged-ce671578-5542-4458-8bd2-c23e89aa5b61 for instance with vm_state building and task_state spawning. [ 781.324793] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.363357] env[70020]: DEBUG nova.network.neutron [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Successfully created port: ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.413421] env[70020]: DEBUG oslo_vmware.api [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618103, 'name': PowerOnVM_Task, 'duration_secs': 0.475132} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.416066] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.416287] env[70020]: INFO nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Took 7.38 seconds to spawn the instance on the hypervisor. 
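The CopyVirtualDisk / ExtendVirtualDisk / ReconfigVM / Rename / PowerOnVM entries above all follow the same wait_for_task / _poll_task rhythm: the driver kicks off a vCenter task, then polls it until it reports success or error (the "progress is 0%" / "progress is 66%" lines, then "completed successfully"). A minimal sketch of that polling loop is below; the get_task_info callable, the fixed poll interval, and the state strings are illustrative assumptions, not oslo.vmware's actual internals, which go through the vSphere PropertyCollector.

    import time

    def wait_for_vc_task(get_task_info, task_ref, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter task reference until it reaches a terminal state.

        get_task_info is a hypothetical callable returning an object with
        .state ('queued' | 'running' | 'success' | 'error'), .progress and .error.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == 'success':
                # e.g. "Task: {'id': task-3618103, 'name': PowerOnVM_Task, ...} completed successfully."
                return info
            if info.state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_ref, info.error))
            # e.g. "Task: {'id': task-3618104, 'name': PowerOnVM_Task} progress is 66%."
            time.sleep(poll_interval)
        raise TimeoutError('task %s did not complete within %.0fs' % (task_ref, timeout))

Each spawn in the log chains several such tasks back to back, which is why a single instance build accumulates multiple task-36181xx waits before the final "Powered on the VM" entry.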
[ 781.416464] env[70020]: DEBUG nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.417540] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6a12a0-63ef-4e30-96f7-b66a643b67f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.475379] env[70020]: DEBUG nova.compute.manager [req-60a4e45e-1861-4f24-9d54-130fed8817df req-4297f646-ba7f-41a5-b091-5821e40dca22 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Received event network-vif-plugged-2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.475595] env[70020]: DEBUG oslo_concurrency.lockutils [req-60a4e45e-1861-4f24-9d54-130fed8817df req-4297f646-ba7f-41a5-b091-5821e40dca22 service nova] Acquiring lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.475798] env[70020]: DEBUG oslo_concurrency.lockutils [req-60a4e45e-1861-4f24-9d54-130fed8817df req-4297f646-ba7f-41a5-b091-5821e40dca22 service nova] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.476095] env[70020]: DEBUG oslo_concurrency.lockutils [req-60a4e45e-1861-4f24-9d54-130fed8817df req-4297f646-ba7f-41a5-b091-5821e40dca22 service nova] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.476185] env[70020]: DEBUG nova.compute.manager [req-60a4e45e-1861-4f24-9d54-130fed8817df req-4297f646-ba7f-41a5-b091-5821e40dca22 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] No waiting events found dispatching network-vif-plugged-2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 781.476281] env[70020]: WARNING nova.compute.manager [req-60a4e45e-1861-4f24-9d54-130fed8817df req-4297f646-ba7f-41a5-b091-5821e40dca22 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Received unexpected event network-vif-plugged-2a10027e-1a93-40ca-a079-297eb6af7618 for instance with vm_state building and task_state spawning. 
[ 781.508858] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Successfully updated port: ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.575660] env[70020]: DEBUG nova.network.neutron [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Successfully updated port: 2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.597150] env[70020]: DEBUG oslo_vmware.api [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618104, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.932023] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfbd285-f2aa-4856-972c-1570f4ee2b3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.937289] env[70020]: INFO nova.compute.manager [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Took 70.95 seconds to build instance. [ 781.941350] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e781073-cefc-48d0-9703-e5c3de88c19f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.977070] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe98bea-5771-495a-ac07-cc250144e112 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.984953] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc76c8e1-46d9-4d9d-8ae2-03400e769e76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.000533] env[70020]: DEBUG nova.compute.provider_tree [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.084038] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.084219] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.084351] env[70020]: DEBUG nova.network.neutron [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.098144] env[70020]: DEBUG oslo_vmware.api [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618104, 'name': PowerOnVM_Task, 'duration_secs': 0.723664} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.098315] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.098485] env[70020]: DEBUG nova.compute.manager [None req-78819a86-32c7-4561-bc8c-ff30f05f1568 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.099345] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dd8286-35d6-4601-bda4-397ba3e0def2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.339900] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.364882] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.365151] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.365309] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.365485] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.365629] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.365772] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.366066] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 782.366294] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.366484] env[70020]: DEBUG nova.virt.hardware [None 
req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.366648] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.366822] env[70020]: DEBUG nova.virt.hardware [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.367683] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809e34dc-c9c4-4dc7-a6bb-8447d7c78dd6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.377081] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0d22a7-e546-4308-9c46-d95124e34b35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.439588] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bde01fce-1464-48f9-b3a0-6b8cf887f6d2 tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.431s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.504499] env[70020]: DEBUG nova.scheduler.client.report [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.728365] env[70020]: DEBUG nova.network.neutron [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.943471] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 782.949919] env[70020]: DEBUG nova.network.neutron [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updating instance_info_cache with network_info: [{"id": "2a10027e-1a93-40ca-a079-297eb6af7618", "address": "fa:16:3e:3d:46:13", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a10027e-1a", "ovs_interfaceid": "2a10027e-1a93-40ca-a079-297eb6af7618", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.012758] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.013196] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 783.016334] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 53.211s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.016558] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.018623] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.963s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.020014] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.024326] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.101s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.025886] env[70020]: INFO nova.compute.claims [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.054806] env[70020]: INFO nova.scheduler.client.report [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted allocations for instance 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0 [ 783.068612] env[70020]: INFO nova.scheduler.client.report [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Deleted allocations for instance d601179a-df77-4f2e-b8df-9185b8a485e3 [ 783.069835] env[70020]: DEBUG nova.network.neutron [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Successfully updated port: ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.133041] env[70020]: DEBUG nova.compute.manager [None 
req-83cdbf6e-1445-4f5d-8a7d-a98c51837ce0 tempest-ServerDiagnosticsTest-1839965932 tempest-ServerDiagnosticsTest-1839965932-project-admin] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.134199] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fb81ce-c7e8-41d3-a6ce-4d6412fd2152 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.143736] env[70020]: INFO nova.compute.manager [None req-83cdbf6e-1445-4f5d-8a7d-a98c51837ce0 tempest-ServerDiagnosticsTest-1839965932 tempest-ServerDiagnosticsTest-1839965932-project-admin] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Retrieving diagnostics [ 783.145177] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49dc9bb-a62c-49c5-9242-83329c645c0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.457886] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.457886] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Instance network_info: |[{"id": "2a10027e-1a93-40ca-a079-297eb6af7618", "address": "fa:16:3e:3d:46:13", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a10027e-1a", "ovs_interfaceid": "2a10027e-1a93-40ca-a079-297eb6af7618", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 783.458233] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:46:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a10027e-1a93-40ca-a079-297eb6af7618', 
'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.470706] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Creating folder: Project (c2a0a96b236e4a7c8f6878d0becfc66b). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.472069] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7553c19b-b1d0-4eec-bab7-f411c73226b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.483166] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Created folder: Project (c2a0a96b236e4a7c8f6878d0becfc66b) in parent group-v721521. [ 783.483380] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Creating folder: Instances. Parent ref: group-v721653. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.483638] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41b851c1-f100-4813-866c-d5df2def7823 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.487509] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.493666] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Created folder: Instances in parent group-v721653. [ 783.493911] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 783.494884] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 783.495312] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6987c091-a775-4a58-9ca7-69a1170783dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.519046] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.519046] env[70020]: value = "task-3618107" [ 783.519046] env[70020]: _type = "Task" [ 783.519046] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.532039] env[70020]: DEBUG nova.compute.utils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 783.535328] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618107, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.537137] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 783.537383] env[70020]: DEBUG nova.network.neutron [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.567122] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e08742d2-c836-433f-ad73-bfdb58dc2dae tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 56.776s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.577700] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-c08166c5-2c31-4d40-a61c-c541924eb49c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.577872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-c08166c5-2c31-4d40-a61c-c541924eb49c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.577979] env[70020]: DEBUG nova.network.neutron [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.584931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6833ed38-80d2-49ae-9f06-52612927c4a9 tempest-FloatingIPsAssociationTestJSON-1070439366 tempest-FloatingIPsAssociationTestJSON-1070439366-project-member] Lock "d601179a-df77-4f2e-b8df-9185b8a485e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 54.917s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.608908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264
tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "24184767-92f7-48b3-bbad-16a596ececde" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.609488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "24184767-92f7-48b3-bbad-16a596ececde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.625137] env[70020]: DEBUG nova.policy [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0e22e21d3684201883adc3617ddee72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3a2dc07c1d447ea81ca142d80ab4210', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 783.802647] env[70020]: DEBUG nova.compute.manager [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-changed-ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.803379] env[70020]: DEBUG nova.compute.manager [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Refreshing instance network info cache due to event network-changed-ce671578-5542-4458-8bd2-c23e89aa5b61.
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.803752] env[70020]: DEBUG oslo_concurrency.lockutils [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] Acquiring lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.804063] env[70020]: DEBUG oslo_concurrency.lockutils [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] Acquired lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.804348] env[70020]: DEBUG nova.network.neutron [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Refreshing network info cache for port ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.850371] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Successfully updated port: 5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.989907] env[70020]: DEBUG nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Received event network-changed-2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.990442] env[70020]: DEBUG nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Refreshing instance network info cache due to event network-changed-2a10027e-1a93-40ca-a079-297eb6af7618. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.990915] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Acquiring lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.993430] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Acquired lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.993430] env[70020]: DEBUG nova.network.neutron [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Refreshing network info cache for port 2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.038205] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 784.046581] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618107, 'name': CreateVM_Task, 'duration_secs': 0.462318} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.048774] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 784.049560] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.049841] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.050047] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 784.050539] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61a43cbd-fdfe-47c4-a1cd-8c825c03499c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.057301] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 784.057301] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52206570-6647-4a3d-8d15-3a0d010c3c05" [ 784.057301] env[70020]: _type = "Task" [ 784.057301] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.069813] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52206570-6647-4a3d-8d15-3a0d010c3c05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.158418] env[70020]: DEBUG nova.network.neutron [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.353895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.381334] env[70020]: DEBUG nova.network.neutron [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.418922] env[70020]: DEBUG nova.network.neutron [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Successfully created port: 632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.508671] env[70020]: DEBUG nova.network.neutron [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Updating instance_info_cache with network_info: [{"id": "ce1440b0-008c-48c7-b1c8-61ab26650d98", "address": "fa:16:3e:7e:e3:8c", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce1440b0-00", "ovs_interfaceid": "ce1440b0-008c-48c7-b1c8-61ab26650d98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.572024] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52206570-6647-4a3d-8d15-3a0d010c3c05, 'name': SearchDatastore_Task, 'duration_secs': 0.01266} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.572024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.572024] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 784.572024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.572521] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.572521] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 784.574902] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73cf5161-f166-4c6b-a757-6232b11f0dc3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.580349] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.581132] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.581928] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock
"ae91adc5-b3a4-4503-91f2-d803eaefedc5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.581928] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.582270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.585180] env[70020]: INFO nova.compute.manager [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Terminating instance [ 784.595431] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 784.595431] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 784.598842] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14746857-711b-44d5-932b-3e414fa459f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.605214] env[70020]: DEBUG nova.network.neutron [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.611553] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 784.611553] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522d2880-a818-c00f-6395-7190f612378f" [ 784.611553] env[70020]: _type = "Task" [ 784.611553] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.625886] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d2880-a818-c00f-6395-7190f612378f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.707529] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0fcd80-2c61-4d06-9c1f-6c9d396e328f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.722279] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc5e867-7544-4311-9611-afebc8c4774e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.783508] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e461d3-03fe-434d-b405-4cf6ff8f329a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.795450] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b95d50-adf5-41a5-b1bb-9d15e4c6228d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.819456] env[70020]: DEBUG nova.compute.provider_tree [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.971927] env[70020]: DEBUG nova.network.neutron [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updated VIF entry in instance network info cache for port 2a10027e-1a93-40ca-a079-297eb6af7618. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.971927] env[70020]: DEBUG nova.network.neutron [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updating instance_info_cache with network_info: [{"id": "2a10027e-1a93-40ca-a079-297eb6af7618", "address": "fa:16:3e:3d:46:13", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a10027e-1a", "ovs_interfaceid": "2a10027e-1a93-40ca-a079-297eb6af7618", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.011760] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-c08166c5-2c31-4d40-a61c-c541924eb49c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.012111] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Instance network_info: |[{"id": "ce1440b0-008c-48c7-b1c8-61ab26650d98", "address": "fa:16:3e:7e:e3:8c", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce1440b0-00", "ovs_interfaceid": "ce1440b0-008c-48c7-b1c8-61ab26650d98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 785.012543] env[70020]: 
DEBUG nova.virt.vmwareapi.vmops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:e3:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce1440b0-008c-48c7-b1c8-61ab26650d98', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.021577] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.021968] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.022167] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb1829a7-d701-4346-9d9a-d9be6b8f7235 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.044434] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.044434] env[70020]: value = "task-3618108" [ 785.044434] env[70020]: _type = "Task" [ 785.044434] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.052638] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618108, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.056362] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 785.092426] env[70020]: DEBUG nova.compute.manager [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 785.092770] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.093852] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e252607f-b4f0-421e-8bd9-7818096777a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.099033] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 785.099248] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.099615] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.099615] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.099721] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.099864] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 785.103624] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 785.103624] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 785.103624] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 785.103624] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 785.103624] env[70020]: DEBUG nova.virt.hardware [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 785.103931] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70f4228-70c2-433f-90de-af1870035a24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.109815] env[70020]: DEBUG oslo_concurrency.lockutils [req-17d16803-3132-4623-b2e5-63d352dd3751 req-3a019448-bed2-4513-a40f-0bb24c74577b service nova] Releasing lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.109815] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 785.111542] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.111737] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.113280] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d951704-0ca6-4524-967e-ab58c552d108 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.115879] env[70020]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c2a65a-a7a4-4c2a-8ffc-a8915ba10947 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.128009] env[70020]: DEBUG oslo_vmware.api [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 785.128009] env[70020]: value = "task-3618109" [ 785.128009] env[70020]: _type = "Task" [ 785.128009] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.140675] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d2880-a818-c00f-6395-7190f612378f, 'name': SearchDatastore_Task, 'duration_secs': 0.031174} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.145339] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6697877-baa0-407f-a2c4-ec21b59fb1e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.155879] env[70020]: DEBUG oslo_vmware.api [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.157198] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 785.157198] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ba6ff0-6ae2-4c4e-d992-7221f3681c84" [ 785.157198] env[70020]: _type = "Task" [ 785.157198] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.167409] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ba6ff0-6ae2-4c4e-d992-7221f3681c84, 'name': SearchDatastore_Task, 'duration_secs': 0.013128} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.167692] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.167940] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c9ce57f3-f9a2-40aa-b7eb-403840c34304/c9ce57f3-f9a2-40aa-b7eb-403840c34304.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 785.168219] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22cf482f-3ed2-4a58-bd80-a31556be5618 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.175884] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 785.175884] env[70020]: value = "task-3618110" [ 785.175884] env[70020]: _type = "Task" [ 785.175884] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.184374] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618110, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.323533] env[70020]: DEBUG nova.scheduler.client.report [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.477689] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Releasing lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.477689] env[70020]: DEBUG nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Received event network-vif-plugged-ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.477689] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Acquiring lock "c08166c5-2c31-4d40-a61c-c541924eb49c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.477689] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.477689] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.477911] env[70020]: DEBUG nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] No waiting events found dispatching network-vif-plugged-ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 785.477911] env[70020]: WARNING nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Received unexpected event network-vif-plugged-ce1440b0-008c-48c7-b1c8-61ab26650d98 for instance with vm_state building and task_state spawning.
[ 785.477911] env[70020]: DEBUG nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Received event network-changed-ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.477911] env[70020]: DEBUG nova.compute.manager [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Refreshing instance network info cache due to event network-changed-ce1440b0-008c-48c7-b1c8-61ab26650d98. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.477911] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Acquiring lock "refresh_cache-c08166c5-2c31-4d40-a61c-c541924eb49c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.478091] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Acquired lock "refresh_cache-c08166c5-2c31-4d40-a61c-c541924eb49c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.478091] env[70020]: DEBUG nova.network.neutron [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Refreshing network info cache for port ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.558815] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618108, 'name': CreateVM_Task, 'duration_secs': 0.489051} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.559616] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.560621] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.560892] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.561282] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.561662] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88468ded-47d8-4bac-a110-508727ca9c86 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.568059] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 785.568059] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b7404b-f4e6-fb3a-b2d2-79a18c80587e" [ 785.568059] env[70020]: _type = "Task" [ 785.568059] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.577195] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7404b-f4e6-fb3a-b2d2-79a18c80587e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.658025] env[70020]: DEBUG oslo_vmware.api [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618109, 'name': PowerOffVM_Task, 'duration_secs': 0.205337} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.658025] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 785.658025] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 785.658379] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79d0b6f0-3126-4e15-87fc-4dec6e40216d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.661552] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.689158] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618110, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.751300] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 785.751619] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 785.751910] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Deleting the datastore file [datastore2] ae91adc5-b3a4-4503-91f2-d803eaefedc5 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 785.752295] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0038a9e6-45a0-4142-8606-b22341bfb8ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.763076] env[70020]: DEBUG oslo_vmware.api [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for the task: (returnval){ [ 785.763076] env[70020]: value = "task-3618112" [ 785.763076] env[70020]: _type = "Task" [ 785.763076] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.774349] env[70020]: DEBUG oslo_vmware.api [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.835116] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.808s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.835116] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.843036] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 46.849s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.843036] env[70020]: DEBUG nova.objects.instance [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 786.017271] env[70020]: DEBUG nova.compute.manager [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-vif-plugged-5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.017271] env[70020]: DEBUG oslo_concurrency.lockutils [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] Acquiring lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.017271] env[70020]: DEBUG oslo_concurrency.lockutils [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.017271] env[70020]: DEBUG oslo_concurrency.lockutils [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.017271] env[70020]: DEBUG nova.compute.manager [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] No waiting events found dispatching network-vif-plugged-5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 786.017498] env[70020]: WARNING nova.compute.manager [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received unexpected event network-vif-plugged-5b6b00cf-55ae-4c3e-a499-95e58b106387 for instance with vm_state building and task_state spawning. [ 786.018046] env[70020]: DEBUG nova.compute.manager [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-changed-5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.018360] env[70020]: DEBUG nova.compute.manager [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Refreshing instance network info cache due to event network-changed-5b6b00cf-55ae-4c3e-a499-95e58b106387. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 786.018674] env[70020]: DEBUG oslo_concurrency.lockutils [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] Acquiring lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.080953] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7404b-f4e6-fb3a-b2d2-79a18c80587e, 'name': SearchDatastore_Task, 'duration_secs': 0.057999} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.081286] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.081616] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.081875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.082911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.083167] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.083438] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f70596b0-98de-483f-a988-39678405514d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.095189] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.095400] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.096325] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8c9f0be-e0c1-42f5-8a36-2dd15e0f447b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.103191] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 786.103191] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f7aeef-c783-765f-05c1-aa53b1c9927d" [ 786.103191] env[70020]: _type = "Task" [ 786.103191] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.110766] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f7aeef-c783-765f-05c1-aa53b1c9927d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.188404] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551517} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.188650] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c9ce57f3-f9a2-40aa-b7eb-403840c34304/c9ce57f3-f9a2-40aa-b7eb-403840c34304.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 786.188851] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 786.189109] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-756e7c17-913a-47ee-8ec2-d6e2f381071a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.197716] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 786.197716] env[70020]: value = "task-3618113" [ 786.197716] env[70020]: _type = "Task" [ 786.197716] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.208798] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.248938] env[70020]: DEBUG nova.network.neutron [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Successfully updated port: 632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.279176] env[70020]: DEBUG oslo_vmware.api [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Task: {'id': task-3618112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33944} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.279881] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 786.280409] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 786.280409] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 786.280409] env[70020]: INFO nova.compute.manager [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Took 1.19 seconds to destroy the instance on the hypervisor. [ 786.280672] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.280839] env[70020]: DEBUG nova.compute.manager [-] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 786.280931] env[70020]: DEBUG nova.network.neutron [-] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 786.347562] env[70020]: DEBUG nova.compute.utils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.355905] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.355905] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.404534] env[70020]: DEBUG nova.policy [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9be1a256e3b49f7a93dad4d718d7deb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19128323d60a4992b0a2f837317d3f04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.446034] env[70020]: DEBUG nova.network.neutron [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Updated VIF entry in instance network info cache for port ce1440b0-008c-48c7-b1c8-61ab26650d98. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.446592] env[70020]: DEBUG nova.network.neutron [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Updating instance_info_cache with network_info: [{"id": "ce1440b0-008c-48c7-b1c8-61ab26650d98", "address": "fa:16:3e:7e:e3:8c", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce1440b0-00", "ovs_interfaceid": "ce1440b0-008c-48c7-b1c8-61ab26650d98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.488507] env[70020]: DEBUG nova.network.neutron [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updating instance_info_cache with network_info: [{"id": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "address": "fa:16:3e:4a:98:2c", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a7c4b8-c8", "ovs_interfaceid": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ce671578-5542-4458-8bd2-c23e89aa5b61", "address": "fa:16:3e:bc:c2:3c", "network": {"id": "b401340a-2795-4824-846c-59bd6cfb419a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1126788092", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce671578-55", "ovs_interfaceid": "ce671578-5542-4458-8bd2-c23e89aa5b61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "address": "fa:16:3e:48:03:20", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b6b00cf-55", "ovs_interfaceid": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.613363] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f7aeef-c783-765f-05c1-aa53b1c9927d, 'name': SearchDatastore_Task, 'duration_secs': 0.061518} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.614537] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35c71371-c409-458c-8f41-f76cd028f989 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.620644] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 786.620644] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52795ae4-3b46-4dfd-bc48-d39eaa19b417" [ 786.620644] env[70020]: _type = "Task" [ 786.620644] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.630564] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52795ae4-3b46-4dfd-bc48-d39eaa19b417, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.710091] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07212} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.710091] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.710853] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6741bb-ff90-410d-b52c-5f5b73c1b306 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.734029] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] c9ce57f3-f9a2-40aa-b7eb-403840c34304/c9ce57f3-f9a2-40aa-b7eb-403840c34304.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.734360] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9d50d3d-f722-4f4e-9abe-a4fa7198a1c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.751297] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.751621] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.751621] env[70020]: DEBUG nova.network.neutron [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.755200] env[70020]: DEBUG oslo_vmware.api [None 
req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 786.755200] env[70020]: value = "task-3618114" [ 786.755200] env[70020]: _type = "Task" [ 786.755200] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.764318] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618114, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.799962] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Successfully created port: 181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.855441] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.858944] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dbede33-1a7b-4206-be9c-b4aad5872b89 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.860099] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.595s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.860453] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.864574] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.969s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.866240] env[70020]: INFO nova.compute.claims [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Claim 
successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.901783] env[70020]: INFO nova.scheduler.client.report [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Deleted allocations for instance 301b30f6-9909-4fc9-8721-88a314e4edb4 [ 786.949711] env[70020]: DEBUG oslo_concurrency.lockutils [req-4b20fce0-8352-4413-ac64-db9d84d1f99e req-3f8fe1f5-3cf8-4829-ab2b-61130ae36400 service nova] Releasing lock "refresh_cache-c08166c5-2c31-4d40-a61c-c541924eb49c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.991396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Releasing lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.992346] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance network_info: |[{"id": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "address": "fa:16:3e:4a:98:2c", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a7c4b8-c8", "ovs_interfaceid": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ce671578-5542-4458-8bd2-c23e89aa5b61", "address": "fa:16:3e:bc:c2:3c", "network": {"id": "b401340a-2795-4824-846c-59bd6cfb419a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1126788092", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce671578-55", "ovs_interfaceid": "ce671578-5542-4458-8bd2-c23e89aa5b61", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "address": "fa:16:3e:48:03:20", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b6b00cf-55", "ovs_interfaceid": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.992346] env[70020]: DEBUG oslo_concurrency.lockutils [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] Acquired lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.992346] env[70020]: DEBUG nova.network.neutron [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Refreshing network info cache for port 5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.993553] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:98:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58a7c4b8-c855-465c-8f72-5d93eccdbb81', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:c2:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce671578-5542-4458-8bd2-c23e89aa5b61', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:03:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b6b00cf-55ae-4c3e-a499-95e58b106387', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 787.005589] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Creating folder: Project 
(a5ae2c1c42704f49854f86cca4f8a95e). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 787.009170] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24d8d1e9-eb36-424a-a69a-a5e3fa6242ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.020357] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Created folder: Project (a5ae2c1c42704f49854f86cca4f8a95e) in parent group-v721521. [ 787.020525] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Creating folder: Instances. Parent ref: group-v721657. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 787.020791] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6d98bfa-37ab-4953-8203-3f875058332a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.031233] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Created folder: Instances in parent group-v721657. [ 787.031481] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 787.031743] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 787.031901] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b989bff-f755-4db9-8537-677f01626f3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.060487] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.060487] env[70020]: value = "task-3618117" [ 787.060487] env[70020]: _type = "Task" [ 787.060487] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.068506] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618117, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.132410] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52795ae4-3b46-4dfd-bc48-d39eaa19b417, 'name': SearchDatastore_Task, 'duration_secs': 0.009499} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.132696] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.132943] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c08166c5-2c31-4d40-a61c-c541924eb49c/c08166c5-2c31-4d40-a61c-c541924eb49c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.133218] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72a6b40f-149a-4f1d-858e-7609f15b372b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.146184] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 787.146184] env[70020]: value = "task-3618118" [ 787.146184] env[70020]: _type = "Task" [ 787.146184] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.154395] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618118, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.194419] env[70020]: DEBUG nova.network.neutron [-] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.243826] env[70020]: DEBUG nova.network.neutron [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updated VIF entry in instance network info cache for port 5b6b00cf-55ae-4c3e-a499-95e58b106387. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.244460] env[70020]: DEBUG nova.network.neutron [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updating instance_info_cache with network_info: [{"id": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "address": "fa:16:3e:4a:98:2c", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a7c4b8-c8", "ovs_interfaceid": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ce671578-5542-4458-8bd2-c23e89aa5b61", "address": "fa:16:3e:bc:c2:3c", "network": {"id": "b401340a-2795-4824-846c-59bd6cfb419a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1126788092", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce671578-55", "ovs_interfaceid": "ce671578-5542-4458-8bd2-c23e89aa5b61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "address": "fa:16:3e:48:03:20", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", 
"segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b6b00cf-55", "ovs_interfaceid": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.270683] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618114, 'name': ReconfigVM_Task, 'duration_secs': 0.400786} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.270963] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Reconfigured VM instance instance-0000002a to attach disk [datastore1] c9ce57f3-f9a2-40aa-b7eb-403840c34304/c9ce57f3-f9a2-40aa-b7eb-403840c34304.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 787.271636] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-891e95f8-e1c7-426e-b275-a7fcbc5d1541 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.277219] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 787.277219] env[70020]: value = "task-3618119" [ 787.277219] env[70020]: _type = "Task" [ 787.277219] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.287097] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618119, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.300039] env[70020]: DEBUG nova.network.neutron [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.411059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d46db345-f034-47f0-916e-d803f8844822 tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "301b30f6-9909-4fc9-8721-88a314e4edb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.826s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.493772] env[70020]: DEBUG nova.network.neutron [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Updating instance_info_cache with network_info: [{"id": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "address": "fa:16:3e:a5:ed:71", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632c2a24-8d", "ovs_interfaceid": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.570277] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618117, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.657565] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49716} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.657921] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c08166c5-2c31-4d40-a61c-c541924eb49c/c08166c5-2c31-4d40-a61c-c541924eb49c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.658041] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.658289] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1d5204e-d82d-4ca2-b8b4-ae5d0b44e442 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.664043] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 787.664043] env[70020]: value = "task-3618120" [ 787.664043] env[70020]: _type = "Task" [ 787.664043] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.672867] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.698494] env[70020]: INFO nova.compute.manager [-] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Took 1.42 seconds to deallocate network for instance. [ 787.747121] env[70020]: DEBUG oslo_concurrency.lockutils [req-412508b1-8e90-45a0-b91b-325d2a0e378b req-8f12e309-1eb7-4478-856b-422c8debfd5f service nova] Releasing lock "refresh_cache-0add6226-3b90-4991-8f2b-81c35e72a7df" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.786854] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618119, 'name': Rename_Task, 'duration_secs': 0.233203} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.787137] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 787.787381] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24031865-99d1-4808-b45f-5421e297118a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.793648] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 787.793648] env[70020]: value = "task-3618121" [ 787.793648] env[70020]: _type = "Task" [ 787.793648] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.801144] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.868913] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.897581] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.897864] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.898052] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.898273] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.898454] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.898629] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.898869] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.899060] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.899258] env[70020]: DEBUG nova.virt.hardware [None 
req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.899454] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.899678] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.900678] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba9fc1c-50e6-4176-880a-b6450c479abe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.911877] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4545b417-34f2-4e54-b566-7d4fa2e8f415 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.996481] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.996661] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Instance network_info: |[{"id": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "address": "fa:16:3e:a5:ed:71", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632c2a24-8d", "ovs_interfaceid": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 787.997094] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 
tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:ed:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '632c2a24-8d7d-4754-87e6-79e1f5f4b8bf', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.004576] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.007369] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.008043] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91220b7c-4ac3-41c1-b15c-dcce1eed037a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.031151] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.031151] env[70020]: value = "task-3618122" [ 788.031151] env[70020]: _type = "Task" [ 788.031151] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.041204] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618122, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.045162] env[70020]: DEBUG nova.compute.manager [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Received event network-vif-plugged-632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.045371] env[70020]: DEBUG oslo_concurrency.lockutils [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] Acquiring lock "55c20886-ae10-4326-a9de-f8577f320a99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.045629] env[70020]: DEBUG oslo_concurrency.lockutils [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] Lock "55c20886-ae10-4326-a9de-f8577f320a99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.045749] env[70020]: DEBUG oslo_concurrency.lockutils [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] Lock "55c20886-ae10-4326-a9de-f8577f320a99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.045887] env[70020]: DEBUG nova.compute.manager [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] No waiting events found dispatching network-vif-plugged-632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 788.046068] env[70020]: WARNING nova.compute.manager [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Received unexpected event network-vif-plugged-632c2a24-8d7d-4754-87e6-79e1f5f4b8bf for instance with vm_state building and task_state spawning. [ 788.046252] env[70020]: DEBUG nova.compute.manager [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Received event network-changed-632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.046405] env[70020]: DEBUG nova.compute.manager [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Refreshing instance network info cache due to event network-changed-632c2a24-8d7d-4754-87e6-79e1f5f4b8bf. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 788.046614] env[70020]: DEBUG oslo_concurrency.lockutils [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] Acquiring lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.046760] env[70020]: DEBUG oslo_concurrency.lockutils [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] Acquired lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.046915] env[70020]: DEBUG nova.network.neutron [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Refreshing network info cache for port 632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.071505] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618117, 'name': CreateVM_Task, 'duration_secs': 0.51676} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.071505] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.072249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.072415] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.072739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 788.075247] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e5158d-00d5-4611-a8a3-9b8ebbbf2587 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.080304] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 788.080304] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52beca8f-df77-85c9-e8cf-17711e75b384" [ 788.080304] env[70020]: _type = "Task" [ 788.080304] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.088822] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52beca8f-df77-85c9-e8cf-17711e75b384, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.176445] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07302} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.179749] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.180886] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2069a3-22d1-4859-9e3e-4ecc0fcb3896 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.209329] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] c08166c5-2c31-4d40-a61c-c541924eb49c/c08166c5-2c31-4d40-a61c-c541924eb49c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.212103] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.212412] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da214717-b2e9-41f7-bc3d-078293fba3ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.963156] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Successfully updated port: 181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.984051] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618121, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.990875] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52beca8f-df77-85c9-e8cf-17711e75b384, 'name': SearchDatastore_Task, 'duration_secs': 0.011115} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.991371] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618122, 'name': CreateVM_Task, 'duration_secs': 0.359164} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.991643] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 788.991643] env[70020]: value = "task-3618123" [ 788.991643] env[70020]: _type = "Task" [ 788.991643] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.991893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.992454] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.992454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.992624] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.992660] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.992984] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.998028] env[70020]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f36113ba-d847-4b3c-8fa6-1ee814469f8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.998028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.998028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.998028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 789.001391] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e35e818-4af6-4d76-80da-19251c6f120e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.012302] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618123, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.012302] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 789.012302] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fa676c-fe44-76e6-c632-06898ca7f7b2" [ 789.012302] env[70020]: _type = "Task" [ 789.012302] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.012740] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.012910] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 789.016495] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a744921-7ec9-4de1-82d8-d5b5473fe3ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.035135] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 789.035135] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ec2d4a-47a5-0815-685e-d5a1d72e3ce0" [ 789.035135] env[70020]: _type = "Task" [ 789.035135] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.035135] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fa676c-fe44-76e6-c632-06898ca7f7b2, 'name': SearchDatastore_Task, 'duration_secs': 0.009112} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.036262] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.036500] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.036799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.036972] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.038025] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.040397] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dee36f40-6167-43de-b1e9-8b13c3e78afe {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.048181] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ec2d4a-47a5-0815-685e-d5a1d72e3ce0, 'name': SearchDatastore_Task, 'duration_secs': 0.009474} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.053101] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.053413] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 789.054452] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f2b55c9-662d-4707-8e7f-a6be44cf0f34 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.057641] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6480575-c38e-45e5-84f0-14b674c37e28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.064940] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 789.064940] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5262e5c2-ca6e-502f-4546-2de2dd239284" [ 789.064940] env[70020]: _type = "Task" [ 789.064940] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.065246] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 789.065246] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525665af-7082-958a-1f0d-7b09e31c5b19" [ 789.065246] env[70020]: _type = "Task" [ 789.065246] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.079336] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5262e5c2-ca6e-502f-4546-2de2dd239284, 'name': SearchDatastore_Task, 'duration_secs': 0.010898} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.082826] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.083038] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0add6226-3b90-4991-8f2b-81c35e72a7df/0add6226-3b90-4991-8f2b-81c35e72a7df.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.083323] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525665af-7082-958a-1f0d-7b09e31c5b19, 'name': SearchDatastore_Task, 'duration_secs': 0.01154} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.087641] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-312c3011-e5c3-4565-a501-7ae5bcf33ee6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.090150] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-818f056f-3767-4964-88b4-e9d4e4ba8d1a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.096104] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 789.096104] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52229d74-a45c-c494-cd42-680bf490601b" [ 789.096104] env[70020]: _type = "Task" [ 789.096104] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.097350] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 789.097350] env[70020]: value = "task-3618124" [ 789.097350] env[70020]: _type = "Task" [ 789.097350] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.110062] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52229d74-a45c-c494-cd42-680bf490601b, 'name': SearchDatastore_Task, 'duration_secs': 0.009372} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.114209] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.114209] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 55c20886-ae10-4326-a9de-f8577f320a99/55c20886-ae10-4326-a9de-f8577f320a99.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.114209] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.118026] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-618d66f9-3846-462c-afa0-e043ca3cbc6b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.120848] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 789.120848] env[70020]: value = "task-3618125" [ 789.120848] env[70020]: _type = "Task" [ 789.120848] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.131116] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618125, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.238943] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab3cb1f-f1ef-4cd9-9263-4b0dc3981a9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.250299] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e9eec6-58f5-46ac-ae1b-3d3dcfe26e50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.288439] env[70020]: DEBUG nova.network.neutron [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Updated VIF entry in instance network info cache for port 632c2a24-8d7d-4754-87e6-79e1f5f4b8bf. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.288918] env[70020]: DEBUG nova.network.neutron [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Updating instance_info_cache with network_info: [{"id": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "address": "fa:16:3e:a5:ed:71", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632c2a24-8d", "ovs_interfaceid": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.294038] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccae818-d9b7-4bf8-821b-a6bb2f61de0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.301288] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fee2095-a7e4-4060-af69-e670dd82aaba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.318759] env[70020]: DEBUG nova.compute.provider_tree [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.470133] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "refresh_cache-c56279e2-0fc6-4546-854c-82e5fda0e7a7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.470246] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "refresh_cache-c56279e2-0fc6-4546-854c-82e5fda0e7a7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.470498] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Building network info 
cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.479704] env[70020]: DEBUG oslo_vmware.api [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618121, 'name': PowerOnVM_Task, 'duration_secs': 1.494648} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.480992] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 789.481307] env[70020]: INFO nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Took 9.93 seconds to spawn the instance on the hypervisor. [ 789.481581] env[70020]: DEBUG nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 789.483137] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b4b504-5b4c-4a9c-85d5-4506c035344b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.505011] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618123, 'name': ReconfigVM_Task, 'duration_secs': 0.293327} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.505423] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Reconfigured VM instance instance-0000002b to attach disk [datastore1] c08166c5-2c31-4d40-a61c-c541924eb49c/c08166c5-2c31-4d40-a61c-c541924eb49c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.506209] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96c0e0b7-fa18-455f-9945-0ffe4af74eff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.513902] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 789.513902] env[70020]: value = "task-3618126" [ 789.513902] env[70020]: _type = "Task" [ 789.513902] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.523951] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618126, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.614508] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618124, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.635580] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618125, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.795216] env[70020]: DEBUG oslo_concurrency.lockutils [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] Releasing lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.795608] env[70020]: DEBUG nova.compute.manager [req-345b5709-2a41-44ac-839a-a880f3877792 req-851f3994-cca7-4428-aa22-6bb83c23f2be service nova] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Received event network-vif-deleted-8b3dcf6d-4b21-4944-82c6-08f6ffcf6e2e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.823351] env[70020]: DEBUG nova.scheduler.client.report [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.003997] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.008372] env[70020]: INFO nova.compute.manager [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Took 67.92 seconds to build instance. 
[ 790.023155] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618126, 'name': Rename_Task, 'duration_secs': 0.17407} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.025933] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.026442] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9ece013-8f98-41a8-b498-2cc68e1857c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.033364] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 790.033364] env[70020]: value = "task-3618127" [ 790.033364] env[70020]: _type = "Task" [ 790.033364] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.040749] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.073150] env[70020]: DEBUG nova.compute.manager [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Received event network-vif-plugged-181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.073439] env[70020]: DEBUG oslo_concurrency.lockutils [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] Acquiring lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.073650] env[70020]: DEBUG oslo_concurrency.lockutils [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.073883] env[70020]: DEBUG oslo_concurrency.lockutils [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.074106] env[70020]: DEBUG nova.compute.manager [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service 
nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] No waiting events found dispatching network-vif-plugged-181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.074293] env[70020]: WARNING nova.compute.manager [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Received unexpected event network-vif-plugged-181209a0-c7c5-4fb9-ba9a-7f87cc194836 for instance with vm_state building and task_state spawning. [ 790.074540] env[70020]: DEBUG nova.compute.manager [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Received event network-changed-181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.074672] env[70020]: DEBUG nova.compute.manager [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Refreshing instance network info cache due to event network-changed-181209a0-c7c5-4fb9-ba9a-7f87cc194836. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 790.074836] env[70020]: DEBUG oslo_concurrency.lockutils [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] Acquiring lock "refresh_cache-c56279e2-0fc6-4546-854c-82e5fda0e7a7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.110763] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615156} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.111017] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0add6226-3b90-4991-8f2b-81c35e72a7df/0add6226-3b90-4991-8f2b-81c35e72a7df.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.111239] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.111481] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95cd8fd5-79c2-4cb3-8d96-1421ee041f65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.119949] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 790.119949] env[70020]: value = "task-3618128" [ 790.119949] env[70020]: _type = "Task" [ 790.119949] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.131820] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618128, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.135747] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618125, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528367} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.135991] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 55c20886-ae10-4326-a9de-f8577f320a99/55c20886-ae10-4326-a9de-f8577f320a99.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.136262] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.136476] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ac27938-348b-4387-9f07-b3b759ddc719 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.144461] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 790.144461] env[70020]: value = "task-3618129" [ 790.144461] env[70020]: _type = "Task" [ 790.144461] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.153163] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618129, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.167188] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Updating instance_info_cache with network_info: [{"id": "181209a0-c7c5-4fb9-ba9a-7f87cc194836", "address": "fa:16:3e:b4:12:47", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181209a0-c7", "ovs_interfaceid": "181209a0-c7c5-4fb9-ba9a-7f87cc194836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.328820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.464s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.329672] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 790.333101] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.013s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.333512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.335985] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.703s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.338064] env[70020]: INFO nova.compute.claims [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.365229] env[70020]: INFO nova.scheduler.client.report [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Deleted allocations for instance 61bea079-9731-48d1-b472-b30226a0b5a1 [ 790.511437] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fcc6b87f-d701-4e73-8f4e-1d75a611da70 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.273s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.543950] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618127, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.631176] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074706} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.631456] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.632313] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab4500f-2516-4835-8052-8ba80199eb01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.668161] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 0add6226-3b90-4991-8f2b-81c35e72a7df/0add6226-3b90-4991-8f2b-81c35e72a7df.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.671942] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c57dd024-7595-4765-a318-a0be396bfe0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.686701] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "refresh_cache-c56279e2-0fc6-4546-854c-82e5fda0e7a7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.687245] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Instance network_info: |[{"id": "181209a0-c7c5-4fb9-ba9a-7f87cc194836", "address": "fa:16:3e:b4:12:47", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181209a0-c7", "ovs_interfaceid": "181209a0-c7c5-4fb9-ba9a-7f87cc194836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.687625] env[70020]: DEBUG oslo_concurrency.lockutils [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f 
req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] Acquired lock "refresh_cache-c56279e2-0fc6-4546-854c-82e5fda0e7a7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.687885] env[70020]: DEBUG nova.network.neutron [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Refreshing network info cache for port 181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.690506] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:12:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '181209a0-c7c5-4fb9-ba9a-7f87cc194836', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.701937] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Creating folder: Project (19128323d60a4992b0a2f837317d3f04). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.704448] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f36ad207-dce7-415a-9cba-867a8d68ce4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.711178] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.256329} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.713157] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.713660] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 790.713660] env[70020]: value = "task-3618130" [ 790.713660] env[70020]: _type = "Task" [ 790.713660] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.714949] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd94248f-622b-4494-bded-9cebbf1ba5e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.727397] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Created folder: Project (19128323d60a4992b0a2f837317d3f04) in parent group-v721521. [ 790.727663] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Creating folder: Instances. Parent ref: group-v721661. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.728221] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d35d7e10-ef9b-4c8d-a5b7-c6fecbd6f75f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.752309] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 55c20886-ae10-4326-a9de-f8577f320a99/55c20886-ae10-4326-a9de-f8577f320a99.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.756372] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cfc547c-999d-4d71-98bb-5e13140d37bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.771289] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618130, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.773269] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Created folder: Instances in parent group-v721661. [ 790.773500] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.774093] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.774348] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe866056-f7b3-4a0d-af5b-46208ea2cfc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.789381] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 790.789381] env[70020]: value = "task-3618133" [ 790.789381] env[70020]: _type = "Task" [ 790.789381] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.793997] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.793997] env[70020]: value = "task-3618134" [ 790.793997] env[70020]: _type = "Task" [ 790.793997] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.801537] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.806856] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618134, 'name': CreateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.842586] env[70020]: DEBUG nova.compute.utils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.845872] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 790.846043] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 790.875940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6dcabf2c-e104-4a77-8182-e9651d1bbd28 tempest-ServersListShow2100Test-971081473 tempest-ServersListShow2100Test-971081473-project-member] Lock "61bea079-9731-48d1-b472-b30226a0b5a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.752s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.885779] env[70020]: DEBUG nova.policy [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9be1a256e3b49f7a93dad4d718d7deb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19128323d60a4992b0a2f837317d3f04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.017601] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 791.047350] env[70020]: DEBUG oslo_vmware.api [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618127, 'name': PowerOnVM_Task, 'duration_secs': 0.622516} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.047681] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.047918] env[70020]: INFO nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Took 8.71 seconds to spawn the instance on the hypervisor. 
[ 791.051237] env[70020]: DEBUG nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.051237] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099d6946-1f37-4bff-a806-47fb29afec63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.152167] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Successfully created port: dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.232590] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618130, 'name': ReconfigVM_Task, 'duration_secs': 0.311339} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.235177] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 0add6226-3b90-4991-8f2b-81c35e72a7df/0add6226-3b90-4991-8f2b-81c35e72a7df.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.235857] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66d28d8a-be43-4ae8-bd17-efc69ba5336c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.245470] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 791.245470] env[70020]: value = "task-3618135" [ 791.245470] env[70020]: _type = "Task" [ 791.245470] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.253972] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618135, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.305164] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.314335] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618134, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.348965] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.479097] env[70020]: DEBUG nova.compute.manager [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Received event network-changed-2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 791.479339] env[70020]: DEBUG nova.compute.manager [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Refreshing instance network info cache due to event network-changed-2a10027e-1a93-40ca-a079-297eb6af7618. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 791.479599] env[70020]: DEBUG oslo_concurrency.lockutils [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] Acquiring lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.479779] env[70020]: DEBUG oslo_concurrency.lockutils [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] Acquired lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.479960] env[70020]: DEBUG nova.network.neutron [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Refreshing network info cache for port 2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.539445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.551131] env[70020]: DEBUG nova.network.neutron [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Updated VIF entry in instance network info cache for port 181209a0-c7c5-4fb9-ba9a-7f87cc194836. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.551287] env[70020]: DEBUG nova.network.neutron [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Updating instance_info_cache with network_info: [{"id": "181209a0-c7c5-4fb9-ba9a-7f87cc194836", "address": "fa:16:3e:b4:12:47", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181209a0-c7", "ovs_interfaceid": "181209a0-c7c5-4fb9-ba9a-7f87cc194836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.573023] env[70020]: INFO nova.compute.manager [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Took 64.02 seconds to build instance. [ 791.758448] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618135, 'name': Rename_Task, 'duration_secs': 0.15926} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.761080] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.761623] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96bcb4df-a566-458f-8117-7c6669c9b4e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.768792] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 791.768792] env[70020]: value = "task-3618136" [ 791.768792] env[70020]: _type = "Task" [ 791.768792] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.779100] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618136, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.809118] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618133, 'name': ReconfigVM_Task, 'duration_secs': 0.788904} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.809663] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 55c20886-ae10-4326-a9de-f8577f320a99/55c20886-ae10-4326-a9de-f8577f320a99.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.810926] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a39d14a-05d6-4619-a73d-8a27f0944ff3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.816588] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618134, 'name': CreateVM_Task, 'duration_secs': 0.737362} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.819429] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.820939] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.821241] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.821887] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.822270] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2a1f49f-36e4-4036-ab0c-14007e862bfb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.825138] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 791.825138] env[70020]: value = "task-3618137" [ 791.825138] env[70020]: _type = "Task" [ 791.825138] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.833129] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 791.833129] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c5e013-3fff-cb90-f032-e421152f7412" [ 791.833129] env[70020]: _type = "Task" [ 791.833129] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.840377] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618137, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.845935] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c5e013-3fff-cb90-f032-e421152f7412, 'name': SearchDatastore_Task, 'duration_secs': 0.010185} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.848747] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.849147] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.849512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.849779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.850160] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.850722] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63527b84-bd8d-4b06-9af0-c5f41ba699fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.865640] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.865640] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.865640] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38abbc01-7e1b-458c-b137-ad578ab98af5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.876934] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 791.876934] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5206348a-5ece-9f84-6f01-4fc268ff01f9" [ 791.876934] env[70020]: _type = "Task" [ 791.876934] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.884120] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5206348a-5ece-9f84-6f01-4fc268ff01f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.942907] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a29d1e-f950-4054-8ddd-2a64d5c3fa28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.951366] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1612f05b-738e-48be-952a-fba4a7a2f3dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.984520] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d079c7b4-8af8-45b1-81ec-3aa6d2dece29 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.991957] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64b57c2-d214-48eb-a9e1-226fbb9bd6ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.008787] env[70020]: DEBUG nova.compute.provider_tree [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.056165] env[70020]: DEBUG oslo_concurrency.lockutils [req-eca18ddb-2d9b-4dd5-bc2c-7e4455c0f51f req-6b52805f-20f8-498f-8e10-ea8d5f96f645 service nova] Releasing lock "refresh_cache-c56279e2-0fc6-4546-854c-82e5fda0e7a7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.074772] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2caed8ec-591d-4579-9059-17e0c57192e8 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.756s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.247816] env[70020]: DEBUG nova.network.neutron [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updated VIF entry in instance network info cache for port 2a10027e-1a93-40ca-a079-297eb6af7618. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 792.247908] env[70020]: DEBUG nova.network.neutron [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updating instance_info_cache with network_info: [{"id": "2a10027e-1a93-40ca-a079-297eb6af7618", "address": "fa:16:3e:3d:46:13", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a10027e-1a", "ovs_interfaceid": "2a10027e-1a93-40ca-a079-297eb6af7618", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.280362] env[70020]: DEBUG oslo_vmware.api [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618136, 'name': PowerOnVM_Task, 'duration_secs': 0.477658} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.280636] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.280833] env[70020]: INFO nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Took 15.47 seconds to spawn the instance on the hypervisor. 
[ 792.281014] env[70020]: DEBUG nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 792.281855] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfc645c-83a4-49c2-9e0e-0411ca1de5d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.340121] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618137, 'name': Rename_Task, 'duration_secs': 0.151232} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.342747] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.342747] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6952c188-959b-416e-8ac4-29b004c61700 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.347892] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 792.347892] env[70020]: value = "task-3618138" [ 792.347892] env[70020]: _type = "Task" [ 792.347892] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.362949] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.366504] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 792.392035] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5206348a-5ece-9f84-6f01-4fc268ff01f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009203} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.392035] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b140439c-56ce-4e29-94b1-ce319b3733a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.401284] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 792.401284] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a03d54-7d04-c85d-50f9-1ba2a3dc3526" [ 792.401284] env[70020]: _type = "Task" [ 792.401284] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.407447] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.407690] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.407913] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.408041] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.408168] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.408386] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 792.408507] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.408739] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.408878] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.409059] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.409123] env[70020]: DEBUG nova.virt.hardware [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.410026] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed923064-27ee-4841-9be3-9e1e20aa656b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.415709] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a03d54-7d04-c85d-50f9-1ba2a3dc3526, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.420753] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a913d75-0a08-4be6-9490-c40c6f24802f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.515885] env[70020]: DEBUG nova.scheduler.client.report [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.580430] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.753612] env[70020]: DEBUG oslo_concurrency.lockutils [req-30c2e9b9-3e45-45d6-b1a6-85fbf537c0e6 req-fc5c2628-90ab-47d5-ba90-e848e523831a service nova] Releasing lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.803389] env[70020]: INFO nova.compute.manager [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Took 74.09 seconds to build instance. [ 792.859645] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618138, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.868518] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Successfully updated port: dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.916620] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a03d54-7d04-c85d-50f9-1ba2a3dc3526, 'name': SearchDatastore_Task, 'duration_secs': 0.012293} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.916620] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.916620] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] c56279e2-0fc6-4546-854c-82e5fda0e7a7/c56279e2-0fc6-4546-854c-82e5fda0e7a7.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.916620] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4b97b6e-ab8c-487d-a442-797f3e7d111a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.925722] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 792.925722] env[70020]: value = "task-3618139" [ 792.925722] env[70020]: _type = "Task" [ 792.925722] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.935440] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618139, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.937463] env[70020]: DEBUG nova.compute.manager [req-37c62508-b92f-4e65-8260-fa7fa5b8ce2b req-42cf1ab9-7638-4f54-9259-ac9b3510ebaa service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Received event network-vif-plugged-dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.937697] env[70020]: DEBUG oslo_concurrency.lockutils [req-37c62508-b92f-4e65-8260-fa7fa5b8ce2b req-42cf1ab9-7638-4f54-9259-ac9b3510ebaa service nova] Acquiring lock "48efbd17-ff4e-426a-a135-f43cae8c97d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.937882] env[70020]: DEBUG oslo_concurrency.lockutils [req-37c62508-b92f-4e65-8260-fa7fa5b8ce2b req-42cf1ab9-7638-4f54-9259-ac9b3510ebaa service nova] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.938226] env[70020]: DEBUG oslo_concurrency.lockutils [req-37c62508-b92f-4e65-8260-fa7fa5b8ce2b req-42cf1ab9-7638-4f54-9259-ac9b3510ebaa service nova] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.938582] env[70020]: DEBUG nova.compute.manager [req-37c62508-b92f-4e65-8260-fa7fa5b8ce2b req-42cf1ab9-7638-4f54-9259-ac9b3510ebaa service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] No waiting events found dispatching network-vif-plugged-dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.938798] env[70020]: WARNING nova.compute.manager [req-37c62508-b92f-4e65-8260-fa7fa5b8ce2b req-42cf1ab9-7638-4f54-9259-ac9b3510ebaa service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Received unexpected event network-vif-plugged-dfb75973-77e7-42e3-96d1-4b8f24a37d6d for instance with vm_state building and task_state spawning. [ 793.023612] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.024245] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 793.027808] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.028061] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.028319] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.028538] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.028741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.030918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.163s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.035022] env[70020]: INFO nova.compute.claims [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.035500] env[70020]: INFO nova.compute.manager [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Terminating instance [ 793.103532] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.306733] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4edd1b86-297d-4398-b6d1-c4c494ea9bca tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.388s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.361572] env[70020]: DEBUG oslo_vmware.api [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618138, 'name': PowerOnVM_Task, 'duration_secs': 0.837822} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.361900] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.362131] env[70020]: INFO nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Took 8.31 seconds to spawn the instance on the hypervisor. [ 793.362313] env[70020]: DEBUG nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.363131] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0862c6b-bf8e-4708-a86a-fca79211c9a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.369463] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "refresh_cache-48efbd17-ff4e-426a-a135-f43cae8c97d0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.369662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "refresh_cache-48efbd17-ff4e-426a-a135-f43cae8c97d0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.369817] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.439204] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 
tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618139, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.533201] env[70020]: DEBUG nova.compute.utils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.535834] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.535834] env[70020]: DEBUG nova.network.neutron [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.544296] env[70020]: DEBUG nova.compute.manager [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 793.544296] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.545499] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ec7e7a-fd7d-4a10-b3f4-ad34b49d7bf8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.556564] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.558241] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58359366-c73d-4716-b6f2-1a8216c1013d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.565182] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 793.565182] env[70020]: value = "task-3618140" [ 793.565182] env[70020]: _type = "Task" [ 793.565182] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.575045] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618140, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.629567] env[70020]: DEBUG nova.policy [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a96138e8caf4575854cbe0224b66030', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a63e8bb4fcd844f69aaeade95326a91b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.657511] env[70020]: DEBUG oslo_concurrency.lockutils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c08166c5-2c31-4d40-a61c-c541924eb49c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.659072] env[70020]: DEBUG oslo_concurrency.lockutils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.810013] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.837289] env[70020]: DEBUG nova.compute.manager [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Received event network-changed-2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 793.838809] env[70020]: DEBUG nova.compute.manager [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Refreshing instance network info cache due to event network-changed-2a10027e-1a93-40ca-a079-297eb6af7618. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 793.838809] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] Acquiring lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.838809] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] Acquired lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.838809] env[70020]: DEBUG nova.network.neutron [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Refreshing network info cache for port 2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 793.887386] env[70020]: INFO nova.compute.manager [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Took 65.31 seconds to build instance. [ 793.922668] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.936230] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703711} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.936501] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] c56279e2-0fc6-4546-854c-82e5fda0e7a7/c56279e2-0fc6-4546-854c-82e5fda0e7a7.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.936715] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.937010] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42927f4d-af38-4375-a70e-45d763211453 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.944041] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 793.944041] env[70020]: value = "task-3618141" [ 793.944041] env[70020]: _type = "Task" [ 793.944041] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.954025] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.040886] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 794.076129] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618140, 'name': PowerOffVM_Task, 'duration_secs': 0.364284} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.078386] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.079154] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.079154] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1c2812f-c4a9-457e-9d74-93ea0cec05bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.080791] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "0add6226-3b90-4991-8f2b-81c35e72a7df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.081015] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.081533] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.081868] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.082018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.084935] env[70020]: DEBUG nova.network.neutron [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 
2198e7f8-5458-4b97-abb3-0a3c932cebc2] Successfully created port: 52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.087454] env[70020]: INFO nova.compute.manager [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Terminating instance [ 794.144986] env[70020]: DEBUG nova.network.neutron [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Updating instance_info_cache with network_info: [{"id": "dfb75973-77e7-42e3-96d1-4b8f24a37d6d", "address": "fa:16:3e:87:49:db", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfb75973-77", "ovs_interfaceid": "dfb75973-77e7-42e3-96d1-4b8f24a37d6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.162669] env[70020]: DEBUG nova.compute.utils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 794.171181] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.171844] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.172089] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Deleting the datastore file [datastore1] 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.173751] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54337ea0-eab1-4613-9d80-21e199652a6a 
{{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.183214] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for the task: (returnval){ [ 794.183214] env[70020]: value = "task-3618143" [ 794.183214] env[70020]: _type = "Task" [ 794.183214] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.202418] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618143, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.337253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.389864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d284dd7-6c4b-45bc-a444-f692cba1a8e1 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "55c20886-ae10-4326-a9de-f8577f320a99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.087s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.458576] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232982} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.458838] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.459645] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c1960c-3e82-43ff-9262-c97cbf542fb6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.484451] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] c56279e2-0fc6-4546-854c-82e5fda0e7a7/c56279e2-0fc6-4546-854c-82e5fda0e7a7.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.486701] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28806a1c-49eb-4861-a579-4beebed9674c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.508340] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 794.508340] env[70020]: value = "task-3618144" [ 794.508340] env[70020]: _type = "Task" [ 794.508340] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.517588] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618144, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.595339] env[70020]: DEBUG nova.compute.manager [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 794.595576] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.596456] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2d2b80-16de-441b-965b-4be2af26e082 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.608194] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.608464] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eebbb0b3-7fd6-47fb-9638-08391835c69e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.614947] env[70020]: DEBUG oslo_vmware.api [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 794.614947] env[70020]: value = "task-3618145" [ 794.614947] env[70020]: _type = "Task" [ 794.614947] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.616461] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1d162e-6026-4405-81e3-1a601ace9755 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.630259] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddac1c5-9706-494a-bbac-fc67addfd697 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.634511] env[70020]: DEBUG oslo_vmware.api [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.635664] env[70020]: DEBUG nova.network.neutron [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updated VIF entry in instance network info cache for port 2a10027e-1a93-40ca-a079-297eb6af7618. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 794.636159] env[70020]: DEBUG nova.network.neutron [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updating instance_info_cache with network_info: [{"id": "2a10027e-1a93-40ca-a079-297eb6af7618", "address": "fa:16:3e:3d:46:13", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a10027e-1a", "ovs_interfaceid": "2a10027e-1a93-40ca-a079-297eb6af7618", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.664853] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "refresh_cache-48efbd17-ff4e-426a-a135-f43cae8c97d0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.665204] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Instance network_info: |[{"id": "dfb75973-77e7-42e3-96d1-4b8f24a37d6d", "address": "fa:16:3e:87:49:db", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfb75973-77", "ovs_interfaceid": "dfb75973-77e7-42e3-96d1-4b8f24a37d6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 794.666479] env[70020]: 
DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:49:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfb75973-77e7-42e3-96d1-4b8f24a37d6d', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.673894] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.674604] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5965834-1d66-42a9-9463-55c966e97a7d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.677942] env[70020]: DEBUG oslo_concurrency.lockutils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.020s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.678388] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.678653] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73e317b4-0ba4-4160-bb30-8bbb41fa765f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.702415] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c706c4-8d73-4d09-a314-3fc84d35be98 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.710877] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618143, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.711235] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.711235] env[70020]: value = "task-3618146" [ 794.711235] env[70020]: _type = "Task" [ 794.711235] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.723218] env[70020]: DEBUG nova.compute.provider_tree [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.729791] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618146, 'name': CreateVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.850071] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "ef85421b-b679-4f38-b052-5695baa2e405" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.850354] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "ef85421b-b679-4f38-b052-5695baa2e405" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.893087] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 795.017751] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618144, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.053867] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 795.080266] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 795.080517] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.080669] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 795.080848] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.080990] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 795.081152] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 795.081359] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 795.081509] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 795.081705] env[70020]: DEBUG nova.virt.hardware [None 
req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 795.081883] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 795.082490] env[70020]: DEBUG nova.virt.hardware [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 795.082989] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d87576-2ef5-478c-bcc3-c20dcd6cdf0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.092054] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb03fd4d-4adb-4d30-bb7b-acdfb87b670f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.125077] env[70020]: DEBUG oslo_vmware.api [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618145, 'name': PowerOffVM_Task, 'duration_secs': 0.231145} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.125350] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.125517] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 795.125760] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e964dde8-e248-4160-a51f-d759349b26fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.139008] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e5a8fd2-0677-4846-9a5f-b841edf561cd req-81924e23-673a-40e1-8775-31d2ebc80da1 service nova] Releasing lock "refresh_cache-c9ce57f3-f9a2-40aa-b7eb-403840c34304" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.204904] env[70020]: DEBUG oslo_vmware.api [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Task: {'id': task-3618143, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.553882} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.205194] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.205377] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.205547] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.205716] env[70020]: INFO nova.compute.manager [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Took 1.66 seconds to destroy the instance on the hypervisor. [ 795.205955] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.206175] env[70020]: DEBUG nova.compute.manager [-] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 795.206271] env[70020]: DEBUG nova.network.neutron [-] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.221029] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618146, 'name': CreateVM_Task, 'duration_secs': 0.358342} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.221143] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.221819] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.222474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.222474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.222679] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50c4c5a5-8103-4f43-8488-f61385d4f7a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.226748] env[70020]: DEBUG nova.scheduler.client.report [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.234539] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 795.234539] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5201e8b4-bb25-e381-04d3-3cc594eb2f17" [ 795.234539] env[70020]: _type = "Task" [ 795.234539] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.245109] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5201e8b4-bb25-e381-04d3-3cc594eb2f17, 'name': SearchDatastore_Task, 'duration_secs': 0.010736} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.245831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.246070] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.246304] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.246449] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.246630] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.247329] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85ecaf66-9624-4ef1-96c2-94e3cdf137a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.256915] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.257123] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.259512] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddcad1fa-fa50-46b4-bdcf-7f73d2111fb9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.261307] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.261545] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.261750] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Deleting the datastore file [datastore2] 0add6226-3b90-4991-8f2b-81c35e72a7df {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.262201] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19e80804-4bd0-47bb-9d18-b18c1660142a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.267126] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 795.267126] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5204fb6d-922f-afbb-4d2e-29219807d015" [ 795.267126] env[70020]: _type = "Task" [ 795.267126] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.274897] env[70020]: DEBUG oslo_vmware.api [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 795.274897] env[70020]: value = "task-3618148" [ 795.274897] env[70020]: _type = "Task" [ 795.274897] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.282382] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5204fb6d-922f-afbb-4d2e-29219807d015, 'name': SearchDatastore_Task, 'duration_secs': 0.010079} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.283890] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74a4f43a-6f92-4348-83c3-9ad5164de828 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.289014] env[70020]: DEBUG oslo_vmware.api [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.291941] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 795.291941] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524eeea9-edb5-994d-3f66-9b7e81f28982" [ 795.291941] env[70020]: _type = "Task" [ 795.291941] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.299921] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524eeea9-edb5-994d-3f66-9b7e81f28982, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.424480] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.519703] env[70020]: DEBUG nova.compute.manager [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Received event network-changed-dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.522076] env[70020]: DEBUG nova.compute.manager [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Refreshing instance network info cache due to event network-changed-dfb75973-77e7-42e3-96d1-4b8f24a37d6d. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 795.522076] env[70020]: DEBUG oslo_concurrency.lockutils [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] Acquiring lock "refresh_cache-48efbd17-ff4e-426a-a135-f43cae8c97d0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.522076] env[70020]: DEBUG oslo_concurrency.lockutils [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] Acquired lock "refresh_cache-48efbd17-ff4e-426a-a135-f43cae8c97d0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.522076] env[70020]: DEBUG nova.network.neutron [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Refreshing network info cache for port dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.527560] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618144, 'name': ReconfigVM_Task, 'duration_secs': 0.578808} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.528933] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Reconfigured VM instance instance-0000002d to attach disk [datastore2] c56279e2-0fc6-4546-854c-82e5fda0e7a7/c56279e2-0fc6-4546-854c-82e5fda0e7a7.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.531758] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-490e4f13-3314-47ac-858c-4327c09d5eae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.538095] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 795.538095] env[70020]: value = "task-3618149" [ 795.538095] env[70020]: _type = "Task" [ 795.538095] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.545334] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618149, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.737266] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.743351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c08166c5-2c31-4d40-a61c-c541924eb49c" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.743351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.743351] env[70020]: INFO nova.compute.manager [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Attaching volume 098e0380-2259-4c44-9668-fe847c3f06db to /dev/sdb [ 795.743351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.765s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.743351] env[70020]: DEBUG nova.objects.instance [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lazy-loading 'resources' on Instance uuid b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 795.782046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] Acquiring lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.782046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] Acquired lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.782358] env[70020]: DEBUG nova.network.neutron [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Building network info cache for instance 
{{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.789281] env[70020]: DEBUG oslo_vmware.api [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155734} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.790505] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.790695] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.790857] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.791033] env[70020]: INFO nova.compute.manager [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Took 1.20 seconds to destroy the instance on the hypervisor. [ 795.791261] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.792019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cec178f-249d-44b3-9a4a-068ba71d6ec2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.796789] env[70020]: DEBUG nova.compute.manager [-] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 795.796912] env[70020]: DEBUG nova.network.neutron [-] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.811504] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc478afa-954f-457b-8bda-35b4b9642e6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.815340] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524eeea9-edb5-994d-3f66-9b7e81f28982, 'name': SearchDatastore_Task, 'duration_secs': 0.008236} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.815602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.815885] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 48efbd17-ff4e-426a-a135-f43cae8c97d0/48efbd17-ff4e-426a-a135-f43cae8c97d0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 795.816484] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a7b9449-b48a-4e59-8aab-ee4b236c59e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.826045] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 795.826045] env[70020]: value = "task-3618150" [ 795.826045] env[70020]: _type = "Task" [ 795.826045] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.831364] env[70020]: DEBUG nova.virt.block_device [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Updating existing volume attachment record: 6e1ca2ba-7b5f-441a-a842-f18d7b48bdbc {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 795.838230] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618150, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.877713] env[70020]: DEBUG nova.network.neutron [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Successfully updated port: 52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.030951] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "4335f92a-897a-4779-be70-4f0754a66d53" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.031436] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "4335f92a-897a-4779-be70-4f0754a66d53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.049401] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618149, 'name': Rename_Task, 'duration_secs': 0.202428} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.049732] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.050020] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6da2893-7ffc-4369-adac-c4e46b4c4c39 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.057542] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 796.057542] env[70020]: value = "task-3618152" [ 796.057542] env[70020]: _type = "Task" [ 796.057542] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.068952] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.240633] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "da347659-5df6-4a3c-8a95-60fa3a598305" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.240997] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "da347659-5df6-4a3c-8a95-60fa3a598305" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.347745] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618150, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.359658] env[70020]: DEBUG nova.network.neutron [-] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.384094] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.384311] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.384402] env[70020]: DEBUG nova.network.neutron [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.398231] env[70020]: DEBUG nova.network.neutron [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Updated VIF entry in instance network info cache for port dfb75973-77e7-42e3-96d1-4b8f24a37d6d. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.398231] env[70020]: DEBUG nova.network.neutron [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Updating instance_info_cache with network_info: [{"id": "dfb75973-77e7-42e3-96d1-4b8f24a37d6d", "address": "fa:16:3e:87:49:db", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfb75973-77", "ovs_interfaceid": "dfb75973-77e7-42e3-96d1-4b8f24a37d6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.433277] env[70020]: DEBUG nova.compute.manager [req-8a2ad4b7-455a-4893-b512-99cc3124dd95 req-17db16f7-84aa-43cb-acf3-00fe42ca92d0 service nova] [instance: 
0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-vif-deleted-ce671578-5542-4458-8bd2-c23e89aa5b61 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.433515] env[70020]: INFO nova.compute.manager [req-8a2ad4b7-455a-4893-b512-99cc3124dd95 req-17db16f7-84aa-43cb-acf3-00fe42ca92d0 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Neutron deleted interface ce671578-5542-4458-8bd2-c23e89aa5b61; detaching it from the instance and deleting it from the info cache [ 796.434424] env[70020]: DEBUG nova.network.neutron [req-8a2ad4b7-455a-4893-b512-99cc3124dd95 req-17db16f7-84aa-43cb-acf3-00fe42ca92d0 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updating instance_info_cache with network_info: [{"id": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "address": "fa:16:3e:4a:98:2c", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a7c4b8-c8", "ovs_interfaceid": "58a7c4b8-c855-465c-8f72-5d93eccdbb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "address": "fa:16:3e:48:03:20", "network": {"id": "460591ee-52d0-41ce-8c13-e045fecd0dca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1907785980", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b6b00cf-55", "ovs_interfaceid": "5b6b00cf-55ae-4c3e-a499-95e58b106387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.576258] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618152, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.592134] env[70020]: DEBUG nova.network.neutron [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Updating instance_info_cache with network_info: [{"id": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "address": "fa:16:3e:a5:ed:71", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632c2a24-8d", "ovs_interfaceid": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.744015] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "da347659-5df6-4a3c-8a95-60fa3a598305" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.503s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.744702] env[70020]: DEBUG nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 796.837813] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618150, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668136} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.838279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 48efbd17-ff4e-426a-a135-f43cae8c97d0/48efbd17-ff4e-426a-a135-f43cae8c97d0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.838560] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.838879] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2783a7e7-cde3-4be4-9c03-e600f8c26e64 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.845931] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 796.845931] env[70020]: value = "task-3618155" [ 796.845931] env[70020]: _type = "Task" [ 796.845931] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.855920] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618155, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.863533] env[70020]: INFO nova.compute.manager [-] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Took 1.66 seconds to deallocate network for instance. [ 796.902462] env[70020]: DEBUG oslo_concurrency.lockutils [req-ad876360-7fc9-4796-ba8d-a760820c04c2 req-a5b51ec4-b183-4206-84de-1dc881861551 service nova] Releasing lock "refresh_cache-48efbd17-ff4e-426a-a135-f43cae8c97d0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.906301] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff2d3a7-639f-4540-a21a-00a6b93fb846 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.919274] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1029d585-6e1a-46e4-ba6f-28482e5a675c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.927902] env[70020]: DEBUG nova.network.neutron [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.962847] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2f5ef97-7c46-43fe-b072-da992651cd35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.963244] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7fb415-f12e-463b-8519-6a8ff3ef7efe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.972536] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbb50df-6fe7-4418-ab06-54b3f6999f19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.979806] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be2b26e-2b65-4d9f-b1d2-ee29a3e608e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.006088] env[70020]: DEBUG nova.compute.provider_tree [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.022899] env[70020]: DEBUG nova.compute.manager [req-8a2ad4b7-455a-4893-b512-99cc3124dd95 req-17db16f7-84aa-43cb-acf3-00fe42ca92d0 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Detach interface failed, port_id=ce671578-5542-4458-8bd2-c23e89aa5b61, reason: Instance 0add6226-3b90-4991-8f2b-81c35e72a7df could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 797.071094] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618152, 'name': PowerOnVM_Task, 'duration_secs': 1.007738} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.074400] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.074640] env[70020]: INFO nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Took 9.21 seconds to spawn the instance on the hypervisor. 
[ 797.074827] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.076055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c8ef4b-9589-4312-9812-afaea1c1f660 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.098880] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] Releasing lock "refresh_cache-55c20886-ae10-4326-a9de-f8577f320a99" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.098880] env[70020]: DEBUG nova.compute.manager [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Inject network info {{(pid=70020) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 797.099094] env[70020]: DEBUG nova.compute.manager [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] network_info to inject: |[{"id": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "address": "fa:16:3e:a5:ed:71", "network": {"id": "943dd639-07d5-4e71-8d72-01499704fb50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-408902375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3a2dc07c1d447ea81ca142d80ab4210", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632c2a24-8d", "ovs_interfaceid": "632c2a24-8d7d-4754-87e6-79e1f5f4b8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 797.105190] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Reconfiguring VM instance to set the machine id {{(pid=70020) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 797.106133] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06da46b2-2610-46f2-8968-0740d5dfa346 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.118523] env[70020]: DEBUG nova.network.neutron 
[None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.125612] env[70020]: DEBUG oslo_vmware.api [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] Waiting for the task: (returnval){ [ 797.125612] env[70020]: value = "task-3618156" [ 797.125612] env[70020]: _type = "Task" [ 797.125612] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.136114] env[70020]: DEBUG oslo_vmware.api [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] Task: {'id': task-3618156, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.249551] env[70020]: DEBUG nova.compute.utils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 797.250673] env[70020]: DEBUG nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 797.253269] env[70020]: DEBUG nova.network.neutron [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 797.302114] env[70020]: DEBUG nova.policy [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f04086f8b9d8499d9e5df1e473050164', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '428dafa7c09940178257061b27baa232', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 797.357666] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145396} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.357666] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.359219] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8ef6dc-d539-45e0-bcf2-1540c67c3e6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.373136] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.381767] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 48efbd17-ff4e-426a-a135-f43cae8c97d0/48efbd17-ff4e-426a-a135-f43cae8c97d0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.382098] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15bc1dd1-357b-4ca3-949c-20e89c58ee16 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.402025] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 
tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 797.402025] env[70020]: value = "task-3618157" [ 797.402025] env[70020]: _type = "Task" [ 797.402025] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.411124] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618157, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.510045] env[70020]: DEBUG nova.scheduler.client.report [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.554880] env[70020]: DEBUG nova.network.neutron [-] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.572372] env[70020]: DEBUG nova.network.neutron [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Successfully created port: d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.595954] env[70020]: INFO nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Took 64.70 seconds to build instance. 
[ 797.621455] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.621817] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance network_info: |[{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 797.622276] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c0:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f847601f-7479-48eb-842f-41f94eea8537', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52cf3b73-bbee-4e96-91f2-a1caa2041501', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.630198] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating folder: Project (a63e8bb4fcd844f69aaeade95326a91b). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.631267] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a97bdc93-7fe7-4548-ac58-500ac12cc789 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.644028] env[70020]: DEBUG oslo_vmware.api [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] Task: {'id': task-3618156, 'name': ReconfigVM_Task, 'duration_secs': 0.178684} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.644957] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ad844869-04b3-41ed-a19e-5dce24fcc30c tempest-ServersAdminTestJSON-561524990 tempest-ServersAdminTestJSON-561524990-project-admin] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Reconfigured VM instance to set the machine id {{(pid=70020) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 797.646211] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Created folder: Project (a63e8bb4fcd844f69aaeade95326a91b) in parent group-v721521. [ 797.646470] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating folder: Instances. Parent ref: group-v721667. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.646789] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c1733ac-68cb-4294-8a43-429798337106 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.657530] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Created folder: Instances in parent group-v721667. [ 797.657816] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.658177] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.658306] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a0ab2a7-6f6e-4947-a5e4-fe7c9d897066 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.682410] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.682410] env[70020]: value = "task-3618160" [ 797.682410] env[70020]: _type = "Task" [ 797.682410] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.692525] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618160, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.754293] env[70020]: DEBUG nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 797.809030] env[70020]: DEBUG nova.compute.manager [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-vif-plugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.809030] env[70020]: DEBUG oslo_concurrency.lockutils [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.809030] env[70020]: DEBUG oslo_concurrency.lockutils [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.809030] env[70020]: DEBUG oslo_concurrency.lockutils [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.809030] env[70020]: DEBUG nova.compute.manager [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] No waiting events found dispatching network-vif-plugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 797.809328] env[70020]: WARNING nova.compute.manager [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received unexpected event network-vif-plugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 for instance with vm_state building and task_state spawning. [ 797.809328] env[70020]: DEBUG nova.compute.manager [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Received event network-vif-deleted-271f324e-5244-40df-9393-7b0a123839bb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.809452] env[70020]: DEBUG nova.compute.manager [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-changed-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.809626] env[70020]: DEBUG nova.compute.manager [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Refreshing instance network info cache due to event network-changed-52cf3b73-bbee-4e96-91f2-a1caa2041501. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 797.809819] env[70020]: DEBUG oslo_concurrency.lockutils [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.809952] env[70020]: DEBUG oslo_concurrency.lockutils [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.810112] env[70020]: DEBUG nova.network.neutron [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Refreshing network info cache for port 52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.913900] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618157, 'name': ReconfigVM_Task, 'duration_secs': 0.278233} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.914049] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 48efbd17-ff4e-426a-a135-f43cae8c97d0/48efbd17-ff4e-426a-a135-f43cae8c97d0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.915282] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b6df07f-9310-49eb-bd54-72968ea96717 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.923218] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 797.923218] env[70020]: value = "task-3618161" [ 797.923218] env[70020]: _type = "Task" [ 797.923218] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.932049] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618161, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.017179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.275s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.020399] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.307s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.021085] env[70020]: DEBUG nova.objects.instance [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lazy-loading 'resources' on Instance uuid 738d52c6-0368-434f-a14f-05b47ca865e3 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.049254] env[70020]: INFO nova.scheduler.client.report [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Deleted allocations for instance b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e [ 798.059230] env[70020]: INFO nova.compute.manager [-] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Took 2.26 seconds to deallocate network for instance. [ 798.098355] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.449s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.192138] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618160, 'name': CreateVM_Task, 'duration_secs': 0.414875} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.192278] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.192996] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.193393] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.193492] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 798.193737] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f96fcd12-6de6-4ef1-b19c-f3bbf10a6a09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.198364] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 798.198364] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dbc059-c9de-f56b-bed1-558e226430b0" [ 798.198364] env[70020]: _type = "Task" [ 798.198364] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.205990] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dbc059-c9de-f56b-bed1-558e226430b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.439971] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618161, 'name': Rename_Task, 'duration_secs': 0.147989} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.440337] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.440599] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-340cbbf6-4fe1-4df5-857f-b79dd698a4f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.447566] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 798.447566] env[70020]: value = "task-3618163" [ 798.447566] env[70020]: _type = "Task" [ 798.447566] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.457952] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.567823] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.568197] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a3e13cb-b09a-462d-aa20-f4bbd46e41bd tempest-ServersTestBootFromVolume-2114082365 tempest-ServersTestBootFromVolume-2114082365-project-member] Lock "b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.739s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.603253] env[70020]: DEBUG nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.679033] env[70020]: DEBUG nova.compute.manager [req-e25c2eb1-15b8-4900-bc87-c084c5761352 req-2a368b38-f1ee-4ccb-a88e-73586456ba88 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-vif-deleted-5b6b00cf-55ae-4c3e-a499-95e58b106387 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 798.679480] env[70020]: DEBUG nova.compute.manager [req-e25c2eb1-15b8-4900-bc87-c084c5761352 req-2a368b38-f1ee-4ccb-a88e-73586456ba88 service nova] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Received event network-vif-deleted-58a7c4b8-c855-465c-8f72-5d93eccdbb81 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 798.708744] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dbc059-c9de-f56b-bed1-558e226430b0, 'name': SearchDatastore_Task, 'duration_secs': 0.010547} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.713139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.713139] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.713139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.713139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.713139] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.713469] env[70020]: DEBUG nova.network.neutron [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updated VIF entry in instance network 
info cache for port 52cf3b73-bbee-4e96-91f2-a1caa2041501. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.713576] env[70020]: DEBUG nova.network.neutron [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.714891] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a99bffcc-8b44-4f69-ae47-1df58336e2b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.724623] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.724820] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.726260] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6455cd4-f21d-43f7-b97a-e79b3b27327b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.736020] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 798.736020] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b4ca6d-29d4-0310-8da9-eec5b154c5e0" [ 798.736020] env[70020]: _type = "Task" [ 798.736020] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.745270] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b4ca6d-29d4-0310-8da9-eec5b154c5e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.770643] env[70020]: DEBUG nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 798.796835] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 798.797135] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.797334] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 798.797567] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.797745] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 798.797912] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 798.798172] 
env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 798.798356] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 798.798549] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 798.798750] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 798.798943] env[70020]: DEBUG nova.virt.hardware [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 798.800272] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202ad17e-f756-46a0-ad8d-4ed8ad61d6c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.810835] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76490a20-ec9a-4930-851e-cc9cc2eab69c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.959996] env[70020]: DEBUG oslo_vmware.api [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618163, 'name': PowerOnVM_Task, 'duration_secs': 0.507865} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.960305] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 798.960705] env[70020]: INFO nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Took 6.59 seconds to spawn the instance on the hypervisor. 
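[annotation, not part of the captured log] The "Build topologies ... 1:1:1" / "Possible topologies" entries above enumerate sockets:cores:threads factorizations of the flavor's vCPU count and keep those within the configured maximums. A rough, self-contained sketch of that idea (not Nova's actual implementation, which also orders results by preference):

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # enumerate (sockets, cores, threads) triples whose product equals vcpus
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# the 1-vCPU m1.nano flavor with limits 65536:65536:65536 admits only one topology
print(possible_topologies(1, 65536, 65536, 65536))   # -> [(1, 1, 1)]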
[ 798.960914] env[70020]: DEBUG nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.967021] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ff9c0c-69e7-4552-88fe-8dbf51bd3397 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.128348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.138592] env[70020]: DEBUG nova.network.neutron [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Successfully updated port: d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.139552] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579cb7f8-f5b5-471a-bf7d-b64369ee22da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.147086] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4158033-6593-43f5-a7d8-55a13f753d45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.181311] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edde142-b505-41c1-b829-057f9e98a3d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.190203] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e835b911-6124-4880-a986-2dfa7caa4a00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.206011] env[70020]: DEBUG nova.compute.provider_tree [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.219443] env[70020]: DEBUG oslo_concurrency.lockutils [req-f15aad4b-cdde-42f4-82d8-fca6c73a4da3 req-23b4996f-bb5e-4911-abab-b19ce7a851fd service nova] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.246400] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b4ca6d-29d4-0310-8da9-eec5b154c5e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009486} 
completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.247044] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c29fe6a2-8da8-4d14-bcfd-2862bf43cc0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.253611] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 799.253611] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e2a6fc-c947-04c1-706a-e91d436d3abd" [ 799.253611] env[70020]: _type = "Task" [ 799.253611] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.259810] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e2a6fc-c947-04c1-706a-e91d436d3abd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.487643] env[70020]: INFO nova.compute.manager [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Took 57.61 seconds to build instance. [ 799.643822] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "refresh_cache-36f15b0a-d57f-49d8-9510-1036e889a438" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.644016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquired lock "refresh_cache-36f15b0a-d57f-49d8-9510-1036e889a438" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.644199] env[70020]: DEBUG nova.network.neutron [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.711095] env[70020]: DEBUG nova.scheduler.client.report [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.764386] env[70020]: DEBUG 
oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e2a6fc-c947-04c1-706a-e91d436d3abd, 'name': SearchDatastore_Task, 'duration_secs': 0.028847} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.764658] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.764927] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.765532] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f1adf90-0062-4032-84ec-47a6e5fef4be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.774713] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 799.774713] env[70020]: value = "task-3618164" [ 799.774713] env[70020]: _type = "Task" [ 799.774713] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.783700] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.893609] env[70020]: DEBUG nova.compute.manager [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Received event network-vif-plugged-d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.894195] env[70020]: DEBUG oslo_concurrency.lockutils [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] Acquiring lock "36f15b0a-d57f-49d8-9510-1036e889a438-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.894195] env[70020]: DEBUG oslo_concurrency.lockutils [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] Lock "36f15b0a-d57f-49d8-9510-1036e889a438-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.894439] env[70020]: DEBUG oslo_concurrency.lockutils [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] Lock "36f15b0a-d57f-49d8-9510-1036e889a438-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.894533] env[70020]: DEBUG nova.compute.manager [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] No waiting events found dispatching network-vif-plugged-d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.894717] env[70020]: WARNING nova.compute.manager [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Received unexpected event network-vif-plugged-d57e8cdf-1b06-49d3-ba61-715ba529bb2e for instance with vm_state building and task_state spawning. [ 799.895040] env[70020]: DEBUG nova.compute.manager [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Received event network-changed-d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.895520] env[70020]: DEBUG nova.compute.manager [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Refreshing instance network info cache due to event network-changed-d57e8cdf-1b06-49d3-ba61-715ba529bb2e. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 799.895753] env[70020]: DEBUG oslo_concurrency.lockutils [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] Acquiring lock "refresh_cache-36f15b0a-d57f-49d8-9510-1036e889a438" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.947661] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "1d9218db-05d8-4e33-837f-e9865946237f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.948013] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.990248] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fd363767-d0e4-4e9d-909e-5725ae197596 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.302s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.198427] env[70020]: DEBUG nova.network.neutron [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.217378] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.220020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.483s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.223526] env[70020]: INFO nova.compute.claims [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.256479] env[70020]: INFO nova.scheduler.client.report [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Deleted allocations for instance 738d52c6-0368-434f-a14f-05b47ca865e3 [ 800.293279] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618164, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.357744] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.358206] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.358531] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.358754] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.358999] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.361990] env[70020]: INFO nova.compute.manager [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Terminating instance [ 800.363939] env[70020]: INFO nova.compute.manager [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Rebuilding instance [ 800.399220] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 800.399489] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721666', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'name': 'volume-098e0380-2259-4c44-9668-fe847c3f06db', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c08166c5-2c31-4d40-a61c-c541924eb49c', 'attached_at': '', 'detached_at': '', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'serial': '098e0380-2259-4c44-9668-fe847c3f06db'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 800.400380] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a63802f-d7e0-4472-88ce-4d060dda4f5f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.430334] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae25823-ebdc-4d2a-99e9-0069fa17f4fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.481755] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] volume-098e0380-2259-4c44-9668-fe847c3f06db/volume-098e0380-2259-4c44-9668-fe847c3f06db.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.483977] env[70020]: DEBUG nova.network.neutron [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Updating instance_info_cache with network_info: [{"id": "d57e8cdf-1b06-49d3-ba61-715ba529bb2e", "address": "fa:16:3e:8b:b9:87", "network": {"id": "b7223c0a-6db0-479c-a821-513de5987088", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-510309593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "428dafa7c09940178257061b27baa232", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd57e8cdf-1b", "ovs_interfaceid": "d57e8cdf-1b06-49d3-ba61-715ba529bb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.488411] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-beb9cfa0-017a-4000-bd31-7925c195eae7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.513822] env[70020]: DEBUG nova.compute.manager [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.515812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Releasing lock "refresh_cache-36f15b0a-d57f-49d8-9510-1036e889a438" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.516280] env[70020]: DEBUG nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Instance network_info: |[{"id": "d57e8cdf-1b06-49d3-ba61-715ba529bb2e", "address": "fa:16:3e:8b:b9:87", "network": {"id": "b7223c0a-6db0-479c-a821-513de5987088", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-510309593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "428dafa7c09940178257061b27baa232", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd57e8cdf-1b", "ovs_interfaceid": "d57e8cdf-1b06-49d3-ba61-715ba529bb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 800.517210] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.522040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6f2420-696e-4f9d-8d0c-f46052ce1758 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.526782] env[70020]: DEBUG oslo_concurrency.lockutils [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] Acquired lock "refresh_cache-36f15b0a-d57f-49d8-9510-1036e889a438" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.527195] env[70020]: DEBUG nova.network.neutron [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Refreshing network info cache for port d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.529187] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:b9:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55bd18a7-39a8-4d07-9088-9b944f9ff710', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd57e8cdf-1b06-49d3-ba61-715ba529bb2e', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.546365] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Creating folder: Project (428dafa7c09940178257061b27baa232). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.552120] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf25a631-ae56-4089-b56e-9630c5747de1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.562033] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 800.562033] env[70020]: value = "task-3618166" [ 800.562033] env[70020]: _type = "Task" [ 800.562033] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.573260] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Created folder: Project (428dafa7c09940178257061b27baa232) in parent group-v721521. [ 800.573597] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Creating folder: Instances. Parent ref: group-v721670. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.574574] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23c78cdd-1130-4d05-833f-561ef3579599 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.581990] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.589705] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Created folder: Instances in parent group-v721670. [ 800.590033] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 800.590312] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.590615] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6caae77-b73f-45b0-8e2e-3e07c6cc4b11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.619238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "48efbd17-ff4e-426a-a135-f43cae8c97d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.619579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.619877] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "48efbd17-ff4e-426a-a135-f43cae8c97d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.620171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.620475] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.624694] env[70020]: INFO nova.compute.manager [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Terminating instance [ 800.628924] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.628924] env[70020]: value = "task-3618169" [ 800.628924] env[70020]: _type = "Task" [ 800.628924] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.642852] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618169, 'name': CreateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.766237] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0602c3e1-8d09-42a1-933d-ef69b164ac49 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "738d52c6-0368-434f-a14f-05b47ca865e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.622s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.789270] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67734} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.789515] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.789772] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.790032] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59afe581-d9b4-4982-a0d1-e6a222cc2c79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.796355] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 800.796355] env[70020]: value = "task-3618170" [ 800.796355] env[70020]: _type = "Task" [ 800.796355] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.804741] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.870653] env[70020]: DEBUG nova.compute.manager [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 800.870895] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 800.871884] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0e97f1-6f67-4e57-8f09-8cabcedcc3f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.882023] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.882300] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73717a26-8865-44fe-8a11-fec8e2982e43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.889088] env[70020]: DEBUG oslo_vmware.api [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 800.889088] env[70020]: value = "task-3618171" [ 800.889088] env[70020]: _type = "Task" [ 800.889088] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.897818] env[70020]: DEBUG oslo_vmware.api [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.925461] env[70020]: DEBUG nova.network.neutron [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Updated VIF entry in instance network info cache for port d57e8cdf-1b06-49d3-ba61-715ba529bb2e. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 800.925945] env[70020]: DEBUG nova.network.neutron [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Updating instance_info_cache with network_info: [{"id": "d57e8cdf-1b06-49d3-ba61-715ba529bb2e", "address": "fa:16:3e:8b:b9:87", "network": {"id": "b7223c0a-6db0-479c-a821-513de5987088", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-510309593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "428dafa7c09940178257061b27baa232", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd57e8cdf-1b", "ovs_interfaceid": "d57e8cdf-1b06-49d3-ba61-715ba529bb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.073137] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.074501] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.131023] env[70020]: DEBUG nova.compute.manager [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 801.131023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.131023] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d89eb4-b29a-4fdb-912d-7122cddb08bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.143138] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618169, 'name': CreateVM_Task, 'duration_secs': 0.503625} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.147207] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.147207] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.147207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.147207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.147903] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 801.149249] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea4fd5ea-cac1-4dfd-a304-0dc139dd9aa0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.151137] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25ed9a96-1362-4e77-84c4-e7df3542c065 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.156917] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 801.156917] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523b43dd-cb3b-460a-77d8-476dd393f528" [ 801.156917] env[70020]: _type = "Task" [ 801.156917] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.164281] env[70020]: DEBUG oslo_vmware.api [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 801.164281] env[70020]: value = "task-3618172" [ 801.164281] env[70020]: _type = "Task" [ 801.164281] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.171787] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523b43dd-cb3b-460a-77d8-476dd393f528, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.178202] env[70020]: DEBUG oslo_vmware.api [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.311019] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066446} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.311019] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.311019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133006be-8152-4ed0-a25d-d75b9fd5931c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.332956] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.336012] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e65e891a-6431-4b89-814c-c589e8432b8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.356488] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 801.356488] env[70020]: value = "task-3618173" [ 801.356488] env[70020]: _type = "Task" [ 801.356488] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.370860] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618173, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.398048] env[70020]: DEBUG oslo_vmware.api [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618171, 'name': PowerOffVM_Task, 'duration_secs': 0.397194} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.401569] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.401849] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.402375] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab40f392-9461-49ae-90c1-499f66dce65a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.429731] env[70020]: DEBUG oslo_concurrency.lockutils [req-d626eadb-ac0e-4714-b4ea-857c03e469e9 req-07c3f73d-d5e6-4584-8329-f26a919e3976 service nova] Releasing lock "refresh_cache-36f15b0a-d57f-49d8-9510-1036e889a438" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.475808] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.475808] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.475808] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleting the datastore file [datastore2] c56279e2-0fc6-4546-854c-82e5fda0e7a7 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.486652] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76dc38a3-d514-4de2-8a29-e34e82df17f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.491185] env[70020]: DEBUG oslo_vmware.api [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 801.491185] env[70020]: value = "task-3618175" [ 801.491185] env[70020]: _type = "Task" [ 801.491185] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.500793] env[70020]: DEBUG oslo_vmware.api [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.573494] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618166, 'name': ReconfigVM_Task, 'duration_secs': 0.904657} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.573790] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Reconfigured VM instance instance-0000002b to attach disk [datastore2] volume-098e0380-2259-4c44-9668-fe847c3f06db/volume-098e0380-2259-4c44-9668-fe847c3f06db.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.581865] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.582295] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26cb83bf-8bea-4ff2-85c2-fcd813b15801 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.596724] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bb0dc4f-a52b-4d40-9175-2bb6cabff405 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.604172] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 801.604172] env[70020]: value = "task-3618176" [ 801.604172] env[70020]: _type = "Task" [ 801.604172] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.605809] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 801.605809] env[70020]: value = "task-3618177" [ 801.605809] env[70020]: _type = "Task" [ 801.605809] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.621428] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618176, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.626146] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.675530] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523b43dd-cb3b-460a-77d8-476dd393f528, 'name': SearchDatastore_Task, 'duration_secs': 0.019726} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.680274] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.680728] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.681074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.681334] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.681912] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.686473] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae24c44d-5a23-42b0-8c48-0f7b1ed2dda9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.692233] env[70020]: DEBUG oslo_vmware.api [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618172, 'name': PowerOffVM_Task, 'duration_secs': 0.268497} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.692233] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.692233] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.692233] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f9ff4f5-c700-4228-a231-53f55c87a75b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.699207] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.699626] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.704028] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-431b7ce3-02db-4aab-869f-9456d74de613 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.710504] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 801.710504] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e543f8-7886-2b29-6ccf-16d256c63a03" [ 801.710504] env[70020]: _type = "Task" [ 801.710504] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.719822] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e543f8-7886-2b29-6ccf-16d256c63a03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.783349] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.784036] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.784036] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleting the datastore file [datastore2] 48efbd17-ff4e-426a-a135-f43cae8c97d0 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.784189] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cab132c4-2c38-4d85-9861-226ac81bb01c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.792608] env[70020]: DEBUG oslo_vmware.api [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 801.792608] env[70020]: value = "task-3618179" [ 801.792608] env[70020]: _type = "Task" [ 801.792608] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.808275] env[70020]: DEBUG oslo_vmware.api [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.873548] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618173, 'name': ReconfigVM_Task, 'duration_secs': 0.358233} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.874628] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.876813] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b693ac6-5513-49d4-bf0f-0735ff7f0155 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.883555] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 801.883555] env[70020]: value = "task-3618180" [ 801.883555] env[70020]: _type = "Task" [ 801.883555] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.893122] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618180, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.944740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576a70fd-9bb6-466c-8972-e13c6df75345 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.952742] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e9ce80-85b8-461a-b794-25fc041a945b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.984542] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee19dbc-2b20-4fb7-b151-5d66e5889e88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.995935] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac6455d-3e68-4af7-8a9c-c957923428f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.009590] env[70020]: DEBUG nova.compute.provider_tree [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.016764] env[70020]: DEBUG oslo_vmware.api [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303787} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.020439] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.020589] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 802.020757] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.020951] env[70020]: INFO nova.compute.manager [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 802.021211] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 802.022028] env[70020]: DEBUG nova.compute.manager [-] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 802.022171] env[70020]: DEBUG nova.network.neutron [-] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.122409] env[70020]: DEBUG oslo_vmware.api [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618176, 'name': ReconfigVM_Task, 'duration_secs': 0.160499} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.122748] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721666', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'name': 'volume-098e0380-2259-4c44-9668-fe847c3f06db', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c08166c5-2c31-4d40-a61c-c541924eb49c', 'attached_at': '', 'detached_at': '', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'serial': '098e0380-2259-4c44-9668-fe847c3f06db'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 802.127772] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618177, 'name': PowerOffVM_Task, 'duration_secs': 0.215219} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.128240] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 802.128462] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.129937] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a88303f-95c2-40eb-a39b-a2bc0b976329 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.135872] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 802.137091] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64731ec6-8268-4026-9d84-d704c8eff6b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.195650] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 802.195842] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 
ea97f6ab-057e-44d3-835a-68b46d241621] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 802.195954] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleting the datastore file [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 802.196528] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e406db77-d682-4145-beb4-cf317ba4d606 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.202049] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 802.202049] env[70020]: value = "task-3618182" [ 802.202049] env[70020]: _type = "Task" [ 802.202049] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.209949] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.218933] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e543f8-7886-2b29-6ccf-16d256c63a03, 'name': SearchDatastore_Task, 'duration_secs': 0.019251} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.219690] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0c6571d-cbef-4e4e-8c07-ca0e0124ec63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.224619] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 802.224619] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ee97b2-7c56-4c56-b217-8a395817d793" [ 802.224619] env[70020]: _type = "Task" [ 802.224619] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.231978] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ee97b2-7c56-4c56-b217-8a395817d793, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.306285] env[70020]: DEBUG oslo_vmware.api [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255267} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.306526] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.306726] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 802.306923] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.307135] env[70020]: INFO nova.compute.manager [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Took 1.18 seconds to destroy the instance on the hypervisor. [ 802.307396] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 802.307603] env[70020]: DEBUG nova.compute.manager [-] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 802.307697] env[70020]: DEBUG nova.network.neutron [-] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 802.337696] env[70020]: DEBUG nova.compute.manager [req-585a2d3d-e1aa-4758-8646-556575b24d63 req-17f4c6a3-d078-42c9-be85-df8b5f634ec7 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Received event network-vif-deleted-181209a0-c7c5-4fb9-ba9a-7f87cc194836 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.338088] env[70020]: INFO nova.compute.manager [req-585a2d3d-e1aa-4758-8646-556575b24d63 req-17f4c6a3-d078-42c9-be85-df8b5f634ec7 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Neutron deleted interface 181209a0-c7c5-4fb9-ba9a-7f87cc194836; detaching it from the instance and deleting it from the info cache [ 802.338134] env[70020]: DEBUG nova.network.neutron [req-585a2d3d-e1aa-4758-8646-556575b24d63 req-17f4c6a3-d078-42c9-be85-df8b5f634ec7 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.393344] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618180, 'name': Rename_Task, 'duration_secs': 0.150018} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.393608] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.393850] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b239e4a-4961-4ec6-baae-4fbfe3d2c47a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.403022] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 802.403022] env[70020]: value = "task-3618183" [ 802.403022] env[70020]: _type = "Task" [ 802.403022] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.407763] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618183, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.520193] env[70020]: DEBUG nova.scheduler.client.report [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 802.711303] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154537} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.711496] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 802.711678] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 802.711931] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 802.738838] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ee97b2-7c56-4c56-b217-8a395817d793, 'name': SearchDatastore_Task, 'duration_secs': 0.012453} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.739274] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.739700] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 36f15b0a-d57f-49d8-9510-1036e889a438/36f15b0a-d57f-49d8-9510-1036e889a438.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 802.740082] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-601ff341-7310-4296-8f2b-eb1317c3042c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.748165] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 802.748165] env[70020]: value = "task-3618184" [ 802.748165] env[70020]: _type = "Task" [ 802.748165] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.759918] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618184, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.762192] env[70020]: DEBUG nova.compute.manager [req-d79d0a52-04ec-40fc-a3f4-2a5a46d484ce req-8d757bb9-4141-4032-9500-c9b5baca440c service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Received event network-vif-deleted-dfb75973-77e7-42e3-96d1-4b8f24a37d6d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.762669] env[70020]: INFO nova.compute.manager [req-d79d0a52-04ec-40fc-a3f4-2a5a46d484ce req-8d757bb9-4141-4032-9500-c9b5baca440c service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Neutron deleted interface dfb75973-77e7-42e3-96d1-4b8f24a37d6d; detaching it from the instance and deleting it from the info cache [ 802.763315] env[70020]: DEBUG nova.network.neutron [req-d79d0a52-04ec-40fc-a3f4-2a5a46d484ce req-8d757bb9-4141-4032-9500-c9b5baca440c service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.818344] env[70020]: DEBUG nova.network.neutron [-] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.845713] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b34c49c-5550-4908-b70c-6e75bbcd29b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.860791] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd2ac9a-91b0-4922-bdd2-328e98c72bf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.914477] env[70020]: DEBUG nova.compute.manager [req-585a2d3d-e1aa-4758-8646-556575b24d63 req-17f4c6a3-d078-42c9-be85-df8b5f634ec7 service nova] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Detach interface failed, port_id=181209a0-c7c5-4fb9-ba9a-7f87cc194836, reason: Instance c56279e2-0fc6-4546-854c-82e5fda0e7a7 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 802.924959] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618183, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.026685] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.027261] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.030349] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.543s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.034499] env[70020]: INFO nova.compute.claims [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.121018] env[70020]: DEBUG nova.network.neutron [-] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.170361] env[70020]: DEBUG nova.objects.instance [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'flavor' on Instance uuid c08166c5-2c31-4d40-a61c-c541924eb49c {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.261874] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470523} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.261874] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 36f15b0a-d57f-49d8-9510-1036e889a438/36f15b0a-d57f-49d8-9510-1036e889a438.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.261874] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.261874] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7116286-3010-4f2f-a831-2849ace0bd4a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.266302] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a43a08e-8373-4daf-90d0-5d08117f92af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.270110] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 803.270110] env[70020]: value = "task-3618185" [ 803.270110] env[70020]: _type = "Task" [ 803.270110] env[70020]: } to 
complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.277776] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc5c5fb-10ae-4abf-8064-0ef083de5a1e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.291850] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.315157] env[70020]: DEBUG nova.compute.manager [req-d79d0a52-04ec-40fc-a3f4-2a5a46d484ce req-8d757bb9-4141-4032-9500-c9b5baca440c service nova] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Detach interface failed, port_id=dfb75973-77e7-42e3-96d1-4b8f24a37d6d, reason: Instance 48efbd17-ff4e-426a-a135-f43cae8c97d0 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 803.320970] env[70020]: INFO nova.compute.manager [-] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Took 1.30 seconds to deallocate network for instance. [ 803.423490] env[70020]: DEBUG oslo_vmware.api [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618183, 'name': PowerOnVM_Task, 'duration_secs': 0.594643} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.424264] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.424465] env[70020]: INFO nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Took 8.37 seconds to spawn the instance on the hypervisor. 
[ 803.424645] env[70020]: DEBUG nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.425420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cae566-19a4-4241-9532-564d6473ce8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.537734] env[70020]: DEBUG nova.compute.utils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 803.541764] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.542048] env[70020]: DEBUG nova.network.neutron [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.590176] env[70020]: DEBUG nova.policy [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd23beb17cb03479196612e60f449f85a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51e12151b4a44683868b3fbfc0bb5fa9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.625340] env[70020]: INFO nova.compute.manager [-] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Took 1.32 seconds to deallocate network for instance. 
[ 803.678371] env[70020]: DEBUG oslo_concurrency.lockutils [None req-736a3162-3a75-4f3b-b006-191e0012565e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.938s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.755588] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 803.755845] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.756053] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 803.756192] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.756344] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 803.756475] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 803.756681] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 803.756837] env[70020]: DEBUG 
nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 803.759111] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 803.759406] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 803.759621] env[70020]: DEBUG nova.virt.hardware [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 803.761391] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9a24f2-dcdd-491c-88eb-540e374ca94b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.777734] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4029313-b936-4e20-9d07-82583bc94b23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.790074] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0646} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.806187] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.806794] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:ce:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6348da2f-b0bd-499f-bf5e-b14a38d29438', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.814706] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.815448] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87739911-5a88-4846-b9c0-b1d79ac0bc89 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.819254] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.819474] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85a412aa-130d-40dc-98aa-e78db0bd11a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.837451] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.856892] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 36f15b0a-d57f-49d8-9510-1036e889a438/36f15b0a-d57f-49d8-9510-1036e889a438.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.858469] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebac593f-7422-4847-98c4-af3906927f87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.873662] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.873662] env[70020]: value = "task-3618186" [ 803.873662] env[70020]: _type = "Task" [ 803.873662] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.879362] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 803.879362] env[70020]: value = "task-3618187" [ 803.879362] env[70020]: _type = "Task" [ 803.879362] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.882912] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618186, 'name': CreateVM_Task} progress is 15%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.892330] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618187, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.915126] env[70020]: DEBUG oslo_concurrency.lockutils [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.915440] env[70020]: DEBUG oslo_concurrency.lockutils [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.945394] env[70020]: INFO nova.compute.manager [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Took 49.34 seconds to build instance. [ 804.045067] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 804.131764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.178321] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.179525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.179525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.179525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 
tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.179525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.183404] env[70020]: INFO nova.compute.manager [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Terminating instance [ 804.188363] env[70020]: DEBUG nova.network.neutron [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Successfully created port: 9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.390385] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618186, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.401733] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618187, 'name': ReconfigVM_Task, 'duration_secs': 0.276405} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.402759] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 36f15b0a-d57f-49d8-9510-1036e889a438/36f15b0a-d57f-49d8-9510-1036e889a438.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.402941] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2782e147-541b-4689-af53-58c1544847e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.414792] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 804.414792] env[70020]: value = "task-3618188" [ 804.414792] env[70020]: _type = "Task" [ 804.414792] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.420231] env[70020]: INFO nova.compute.manager [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Detaching volume d8414132-451b-4d65-a184-bdc8c5deb6c9 [ 804.437119] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618188, 'name': Rename_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.453143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c08166c5-2c31-4d40-a61c-c541924eb49c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.453143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.453349] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c08166c5-2c31-4d40-a61c-c541924eb49c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.453571] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.453833] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.455657] env[70020]: DEBUG oslo_concurrency.lockutils [None req-69b7ac10-235a-4d4d-86bb-f5d33f225b18 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.290s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.461702] 
env[70020]: INFO nova.compute.manager [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Terminating instance [ 804.489280] env[70020]: INFO nova.virt.block_device [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Attempting to driver detach volume d8414132-451b-4d65-a184-bdc8c5deb6c9 from mountpoint /dev/sdb [ 804.489523] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 804.489794] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721646', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'name': 'volume-d8414132-451b-4d65-a184-bdc8c5deb6c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '08ce6bc8-30fe-4c63-80e1-26c84ae75702', 'attached_at': '', 'detached_at': '', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'serial': 'd8414132-451b-4d65-a184-bdc8c5deb6c9'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 804.490623] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a84c9c4-1eda-49b0-9359-451570c3a8e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.523209] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a3bc24-a077-4c15-b2fa-12b01f182b82 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.530827] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d15da28-7372-4883-8629-da681200ef70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.564992] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50c2932-910a-48df-87b9-4da8f5fcd159 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.581198] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] The volume has not been displaced from its original location: [datastore1] volume-d8414132-451b-4d65-a184-bdc8c5deb6c9/volume-d8414132-451b-4d65-a184-bdc8c5deb6c9.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 804.586365] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Reconfiguring VM instance instance-0000000f to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 804.588376] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6c2d1f0-40f5-4b89-abf5-cf20487bba68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.609019] env[70020]: DEBUG oslo_vmware.api [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 804.609019] env[70020]: value = "task-3618189" [ 804.609019] env[70020]: _type = "Task" [ 804.609019] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.615217] env[70020]: DEBUG oslo_vmware.api [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.691145] env[70020]: DEBUG nova.compute.manager [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.691145] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.692269] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83f2c37-345c-4816-b317-005ad57db942 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.699895] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.700221] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c071c6b6-5f01-4eb3-a82d-ae897c744527 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.709063] env[70020]: DEBUG oslo_vmware.api [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 804.709063] env[70020]: value = "task-3618190" [ 804.709063] env[70020]: _type = "Task" [ 804.709063] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.718020] env[70020]: DEBUG oslo_vmware.api [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618190, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.735322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1920fa-a595-434b-b658-d7ff430664ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.743941] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3529a7-6ce1-40b2-884d-8a5a2fde6767 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.778998] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e67a4a-64fb-45b1-a69a-d2d659a2bf99 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.788259] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44da2d6-90d5-4809-aecb-b3a9f0d52d2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.802393] env[70020]: DEBUG nova.compute.provider_tree [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.886275] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618186, 'name': CreateVM_Task, 'duration_secs': 0.834755} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.887057] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.887282] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.887427] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.887749] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 804.888020] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3acdfa2b-23a9-4805-9fea-9af07b1fbe5e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.894461] env[70020]: DEBUG 
oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 804.894461] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d16436-befe-8519-7e02-b5430a89b739" [ 804.894461] env[70020]: _type = "Task" [ 804.894461] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.903458] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d16436-befe-8519-7e02-b5430a89b739, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.925535] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618188, 'name': Rename_Task, 'duration_secs': 0.147595} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.925535] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.925535] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3f5ed53-44a3-4a15-b494-5495a0277a59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.930713] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 804.930713] env[70020]: value = "task-3618191" [ 804.930713] env[70020]: _type = "Task" [ 804.930713] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.939161] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618191, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.961562] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 804.967576] env[70020]: DEBUG nova.compute.manager [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.967807] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.968088] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b845e233-5cd6-47c2-979a-a4afe06db958 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.974566] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 804.974566] env[70020]: value = "task-3618192" [ 804.974566] env[70020]: _type = "Task" [ 804.974566] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.984499] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618192, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.064353] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 805.097721] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.097721] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.097852] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a 
tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.099667] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.100038] env[70020]: DEBUG nova.virt.hardware [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.100829] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16132154-6e50-490f-93b6-fa7b0660c821 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.114539] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c8cca4-f582-4f0e-b16c-a818c5712e30 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.125865] env[70020]: DEBUG oslo_vmware.api [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618189, 'name': ReconfigVM_Task, 'duration_secs': 0.265733} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.135587] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Reconfigured VM instance instance-0000000f to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 805.141438] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-243b90f9-c4d8-4aa2-84ee-ce5bf5db3cd1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.156043] env[70020]: DEBUG oslo_vmware.api [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 805.156043] env[70020]: value = "task-3618193" [ 805.156043] env[70020]: _type = "Task" [ 805.156043] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.164573] env[70020]: DEBUG oslo_vmware.api [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618193, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.221361] env[70020]: DEBUG oslo_vmware.api [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618190, 'name': PowerOffVM_Task, 'duration_secs': 0.205549} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.222038] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.222038] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 805.222186] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a486faa-3663-4797-8160-56c172dd938b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.305667] env[70020]: DEBUG nova.scheduler.client.report [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.395121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.395775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.407045] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d16436-befe-8519-7e02-b5430a89b739, 'name': SearchDatastore_Task, 'duration_secs': 0.009635} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.407235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.407475] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.407709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.407855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.408046] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.408312] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5844900-ad94-4f31-b7d8-6980027bbbb2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.429332] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.429534] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.434240] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-471880c1-fd03-4b6e-afd5-1819c33b4fe1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.450463] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 805.450463] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529b8258-360d-4018-0569-6dd141257885" [ 805.450463] env[70020]: _type = "Task" [ 805.450463] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.455219] env[70020]: DEBUG oslo_vmware.api [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618191, 'name': PowerOnVM_Task, 'duration_secs': 0.503543} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.459386] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.459640] env[70020]: INFO nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Took 6.69 seconds to spawn the instance on the hypervisor. [ 805.459867] env[70020]: DEBUG nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.460653] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6da1b0-1092-43d5-b22a-d47ac5363ad2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.470344] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529b8258-360d-4018-0569-6dd141257885, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.487371] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618192, 'name': PowerOffVM_Task, 'duration_secs': 0.179014} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.487682] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.487885] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 805.488098] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721666', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'name': 'volume-098e0380-2259-4c44-9668-fe847c3f06db', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c08166c5-2c31-4d40-a61c-c541924eb49c', 'attached_at': '', 'detached_at': '', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'serial': '098e0380-2259-4c44-9668-fe847c3f06db'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 805.489818] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f902b094-8b75-43b6-9d0a-563676d67e9e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.492273] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.513652] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b197d20-8b17-471b-876a-f60d4b0da821 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.521595] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ded391-ea5f-4559-a004-97369d15837a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.550863] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78e805b-ac9e-4115-96df-5f4f064abb8d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.568633] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-098e0380-2259-4c44-9668-fe847c3f06db/volume-098e0380-2259-4c44-9668-fe847c3f06db.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 805.575813] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Reconfiguring VM instance instance-0000002b to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 805.575813] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5f4816-7c06-45d2-ac87-f146a1f8ec91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.593825] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 805.593825] env[70020]: value = "task-3618195" [ 805.593825] env[70020]: _type = "Task" [ 805.593825] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.601849] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618195, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.667691] env[70020]: DEBUG oslo_vmware.api [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618193, 'name': ReconfigVM_Task, 'duration_secs': 0.15997} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.668100] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721646', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'name': 'volume-d8414132-451b-4d65-a184-bdc8c5deb6c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '08ce6bc8-30fe-4c63-80e1-26c84ae75702', 'attached_at': '', 'detached_at': '', 'volume_id': 'd8414132-451b-4d65-a184-bdc8c5deb6c9', 'serial': 'd8414132-451b-4d65-a184-bdc8c5deb6c9'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 805.811412] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.781s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.812111] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 805.815314] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.603s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.815542] env[70020]: DEBUG nova.objects.instance [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lazy-loading 'resources' on Instance uuid ae91adc5-b3a4-4503-91f2-d803eaefedc5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.971022] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529b8258-360d-4018-0569-6dd141257885, 'name': SearchDatastore_Task, 'duration_secs': 0.023255} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.971022] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdba91ca-fc6f-4fea-b667-0cb94ce30c02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.974370] env[70020]: DEBUG nova.compute.manager [req-36e5d734-3c6c-4954-97e4-d00be53dcbcc req-23c01334-8b82-4c65-8f4f-8f861bbeaa89 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Received event network-vif-plugged-9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.974586] env[70020]: DEBUG oslo_concurrency.lockutils [req-36e5d734-3c6c-4954-97e4-d00be53dcbcc req-23c01334-8b82-4c65-8f4f-8f861bbeaa89 service nova] Acquiring lock "b99195a6-866e-4142-970a-42a0564889ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.974800] env[70020]: DEBUG oslo_concurrency.lockutils [req-36e5d734-3c6c-4954-97e4-d00be53dcbcc req-23c01334-8b82-4c65-8f4f-8f861bbeaa89 service nova] Lock "b99195a6-866e-4142-970a-42a0564889ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.974961] env[70020]: DEBUG oslo_concurrency.lockutils [req-36e5d734-3c6c-4954-97e4-d00be53dcbcc req-23c01334-8b82-4c65-8f4f-8f861bbeaa89 service nova] Lock "b99195a6-866e-4142-970a-42a0564889ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.975165] env[70020]: DEBUG nova.compute.manager [req-36e5d734-3c6c-4954-97e4-d00be53dcbcc req-23c01334-8b82-4c65-8f4f-8f861bbeaa89 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] No waiting events found dispatching network-vif-plugged-9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.975364] env[70020]: WARNING nova.compute.manager [req-36e5d734-3c6c-4954-97e4-d00be53dcbcc req-23c01334-8b82-4c65-8f4f-8f861bbeaa89 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Received unexpected event network-vif-plugged-9cdec97e-ce57-46c5-8d8f-1425a3452a72 for instance with vm_state building and task_state spawning. [ 805.984027] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 805.984027] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5288b75e-849f-ec24-77f6-e70c1aae91c7" [ 805.984027] env[70020]: _type = "Task" [ 805.984027] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.987475] env[70020]: INFO nova.compute.manager [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Took 33.15 seconds to build instance. 
[ 805.997099] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5288b75e-849f-ec24-77f6-e70c1aae91c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.103305] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618195, 'name': ReconfigVM_Task, 'duration_secs': 0.267874} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.103305] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Reconfigured VM instance instance-0000002b to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 806.107972] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28bf59ad-0806-406c-ad34-ef5bbab07bb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.127024] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 806.127024] env[70020]: value = "task-3618196" [ 806.127024] env[70020]: _type = "Task" [ 806.127024] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.132279] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618196, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.150681] env[70020]: DEBUG nova.network.neutron [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Successfully updated port: 9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.174236] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "422ca332-5952-443c-a22e-67b1b45df5b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.174422] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "422ca332-5952-443c-a22e-67b1b45df5b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.236371] env[70020]: DEBUG nova.objects.instance [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.322682] env[70020]: DEBUG nova.compute.utils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 806.327020] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 806.327020] env[70020]: DEBUG nova.network.neutron [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 806.382217] env[70020]: DEBUG nova.policy [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b67375d5e85b4ba99d47120945bbf0f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd193f3ca7403a986d72f072590f4f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 806.492476] env[70020]: DEBUG oslo_concurrency.lockutils [None req-37f333a4-cd06-4d8b-9f11-3be862d50a45 tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "36f15b0a-d57f-49d8-9510-1036e889a438" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 82.563s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.498656] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5288b75e-849f-ec24-77f6-e70c1aae91c7, 'name': SearchDatastore_Task, 'duration_secs': 0.021285} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.501648] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.502132] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.502691] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afad05ed-d08f-4642-87f9-87e7813e7af1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.509987] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 806.509987] env[70020]: value = "task-3618197" [ 806.509987] env[70020]: _type = "Task" [ 806.509987] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.522601] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.634683] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618196, 'name': ReconfigVM_Task, 'duration_secs': 0.294966} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.634987] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721666', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'name': 'volume-098e0380-2259-4c44-9668-fe847c3f06db', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c08166c5-2c31-4d40-a61c-c541924eb49c', 'attached_at': '', 'detached_at': '', 'volume_id': '098e0380-2259-4c44-9668-fe847c3f06db', 'serial': '098e0380-2259-4c44-9668-fe847c3f06db'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 806.635179] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.636082] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f5fa31-bb18-4772-b0f1-a5e524a7f417 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.644790] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 806.645041] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cf2e800-4b32-4670-9721-b98c77ea8c2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.659253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "refresh_cache-b99195a6-866e-4142-970a-42a0564889ef" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.659452] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquired lock "refresh_cache-b99195a6-866e-4142-970a-42a0564889ef" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.659568] env[70020]: DEBUG nova.network.neutron [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.761911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock 
"36f15b0a-d57f-49d8-9510-1036e889a438" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.761911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "36f15b0a-d57f-49d8-9510-1036e889a438" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.761911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "36f15b0a-d57f-49d8-9510-1036e889a438-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.761911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "36f15b0a-d57f-49d8-9510-1036e889a438-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.761911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "36f15b0a-d57f-49d8-9510-1036e889a438-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.763319] env[70020]: INFO nova.compute.manager [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Terminating instance [ 806.829533] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 806.836866] env[70020]: DEBUG nova.network.neutron [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Successfully created port: 9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.926382] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837bbb6f-2ce2-4668-8a73-8a36b59b8c9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.933938] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0bce7a-6a57-4409-b1af-ca6e04ab316d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.970144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719e60e4-de5b-4583-a661-268e0092be65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.979990] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3c9979-5fad-40f7-8921-2b6b918c8958 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.998480] env[70020]: DEBUG nova.compute.provider_tree [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.000144] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 807.022520] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618197, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.210258] env[70020]: DEBUG nova.network.neutron [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.251022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.251022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-740bbd20-87d5-43df-b46d-c5d497d062b8 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.333s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.251022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.002s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.251022] env[70020]: DEBUG nova.compute.manager [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.251022] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03865a76-3d0b-4d7f-be73-6630bd28e3a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.267135] env[70020]: DEBUG nova.compute.manager [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 807.270620] env[70020]: DEBUG nova.objects.instance [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.272899] env[70020]: DEBUG nova.compute.manager [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 807.274554] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.280217] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3443bf06-102e-4b88-9bcd-fe4ee375cc4b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.293649] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.293779] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf8d9c39-4f4e-4fd2-8056-91a00ef5d347 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.311256] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.311256] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.311256] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Deleting the datastore file [datastore1] 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.311256] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7badcd19-f2b2-4639-9adf-2a131a5305be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.314633] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 807.314633] env[70020]: value = "task-3618199" [ 807.314633] env[70020]: _type = "Task" [ 807.314633] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.321885] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.322138] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.322321] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleting the datastore file [datastore1] c08166c5-2c31-4d40-a61c-c541924eb49c {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.322649] env[70020]: DEBUG oslo_vmware.api [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for the task: (returnval){ [ 807.322649] env[70020]: value = "task-3618200" [ 807.322649] env[70020]: _type = "Task" [ 807.322649] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.326102] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70681a21-9c2c-452d-8aa1-3203ccec1560 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.333631] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618199, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.346294] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 807.346294] env[70020]: value = "task-3618201" [ 807.346294] env[70020]: _type = "Task" [ 807.346294] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.346590] env[70020]: DEBUG oslo_vmware.api [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.436630] env[70020]: DEBUG nova.network.neutron [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Updating instance_info_cache with network_info: [{"id": "9cdec97e-ce57-46c5-8d8f-1425a3452a72", "address": "fa:16:3e:0c:a7:52", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cdec97e-ce", "ovs_interfaceid": "9cdec97e-ce57-46c5-8d8f-1425a3452a72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.504454] env[70020]: DEBUG nova.scheduler.client.report [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.521080] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618197, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.524239] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.824561] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618199, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.835195] env[70020]: DEBUG oslo_vmware.api [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Task: {'id': task-3618200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225222} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.835411] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.835596] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.835769] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 807.835938] env[70020]: INFO nova.compute.manager [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Took 3.14 seconds to destroy the instance on the hypervisor. [ 807.836194] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.836384] env[70020]: DEBUG nova.compute.manager [-] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 807.836479] env[70020]: DEBUG nova.network.neutron [-] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.843275] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 807.858031] env[70020]: DEBUG oslo_vmware.api [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24977} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.858228] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.858437] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.858664] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 807.858899] env[70020]: INFO nova.compute.manager [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Took 2.89 seconds to destroy the instance on the hypervisor. [ 807.859194] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.859429] env[70020]: DEBUG nova.compute.manager [-] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 807.859559] env[70020]: DEBUG nova.network.neutron [-] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.870712] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 807.870859] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.871560] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 807.871560] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.871560] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 807.871560] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 807.871882] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 807.871944] env[70020]: DEBUG 
nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 807.872157] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 807.872325] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 807.872497] env[70020]: DEBUG nova.virt.hardware [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 807.873361] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e4ac7c-c079-4d21-92b6-4c7266309274 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.881125] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d141b32-9176-49c3-88fe-918be614dc8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.939296] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Releasing lock "refresh_cache-b99195a6-866e-4142-970a-42a0564889ef" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.939625] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Instance network_info: |[{"id": "9cdec97e-ce57-46c5-8d8f-1425a3452a72", "address": "fa:16:3e:0c:a7:52", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cdec97e-ce", "ovs_interfaceid": "9cdec97e-ce57-46c5-8d8f-1425a3452a72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.940119] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:a7:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cdec97e-ce57-46c5-8d8f-1425a3452a72', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.948245] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Creating folder: Project (51e12151b4a44683868b3fbfc0bb5fa9). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.952234] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-485b8abc-8952-4587-9510-f500d58c5ce9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.962550] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Created folder: Project (51e12151b4a44683868b3fbfc0bb5fa9) in parent group-v721521. [ 807.962774] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Creating folder: Instances. Parent ref: group-v721674. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.963043] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ec7a37b-eb98-4e19-a338-baf448d1c27b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.973365] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Created folder: Instances in parent group-v721674. [ 807.973613] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.973917] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b99195a6-866e-4142-970a-42a0564889ef] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.974016] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e37c3e0d-4b4d-42b1-880f-d44d0076602f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.993800] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.993800] env[70020]: value = "task-3618204" [ 807.993800] env[70020]: _type = "Task" [ 807.993800] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.001862] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618204, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.012045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.019032] env[70020]: DEBUG nova.compute.manager [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Received event network-changed-9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.019235] env[70020]: DEBUG nova.compute.manager [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Refreshing instance network info cache due to event network-changed-9cdec97e-ce57-46c5-8d8f-1425a3452a72. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 808.019455] env[70020]: DEBUG oslo_concurrency.lockutils [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] Acquiring lock "refresh_cache-b99195a6-866e-4142-970a-42a0564889ef" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.019593] env[70020]: DEBUG oslo_concurrency.lockutils [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] Acquired lock "refresh_cache-b99195a6-866e-4142-970a-42a0564889ef" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.019759] env[70020]: DEBUG nova.network.neutron [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Refreshing network info cache for port 9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.021157] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.482s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.022615] env[70020]: INFO nova.compute.claims [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.039259] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618197, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.045251] env[70020]: INFO nova.scheduler.client.report [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Deleted allocations for instance ae91adc5-b3a4-4503-91f2-d803eaefedc5 [ 808.280668] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 808.281086] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad4cbd28-ed57-4803-a58f-da16320d570d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.290696] env[70020]: DEBUG nova.compute.manager [req-a50575c7-38b6-4d3d-b061-d545b5aab8da req-925e87e9-db44-4233-a14e-c96afc6acce9 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Received event network-vif-deleted-ce1440b0-008c-48c7-b1c8-61ab26650d98 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.290912] env[70020]: INFO nova.compute.manager [req-a50575c7-38b6-4d3d-b061-d545b5aab8da req-925e87e9-db44-4233-a14e-c96afc6acce9 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Neutron deleted interface ce1440b0-008c-48c7-b1c8-61ab26650d98; detaching it from the instance and deleting it from the info cache [ 808.291138] env[70020]: DEBUG nova.network.neutron [req-a50575c7-38b6-4d3d-b061-d545b5aab8da req-925e87e9-db44-4233-a14e-c96afc6acce9 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.294023] env[70020]: DEBUG oslo_vmware.api [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 808.294023] env[70020]: value = "task-3618205" [ 808.294023] env[70020]: _type = "Task" [ 808.294023] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.304207] env[70020]: DEBUG oslo_vmware.api [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618205, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.327988] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618199, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.505732] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618204, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.544352] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618197, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.799815} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.545234] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 808.546771] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 808.547387] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c0bc86c-5ba0-44f3-a5d8-41e7e1a69aa5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.555675] env[70020]: DEBUG oslo_concurrency.lockutils [None req-103b08c1-83fe-416e-9a36-9f32f9cf69cd tempest-ServerDiagnosticsTest-382342043 tempest-ServerDiagnosticsTest-382342043-project-member] Lock "ae91adc5-b3a4-4503-91f2-d803eaefedc5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.975s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.562022] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 808.562022] env[70020]: value = "task-3618206" [ 808.562022] env[70020]: _type = "Task" [ 808.562022] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.570909] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618206, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.772440] env[70020]: DEBUG nova.network.neutron [-] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.795791] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d983763e-b1ef-410b-be8d-929d44cca57f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.809794] env[70020]: DEBUG oslo_vmware.api [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618205, 'name': PowerOffVM_Task, 'duration_secs': 0.242343} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.811509] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.811702] env[70020]: DEBUG nova.compute.manager [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 808.812530] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39480059-975e-46d9-b52d-a279b3ae5532 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.818050] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68397447-3f14-4c1a-ac6c-950f5935e966 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.847870] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618199, 'name': PowerOffVM_Task, 'duration_secs': 1.253394} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.864358] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.864578] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 808.865524] env[70020]: DEBUG nova.compute.manager [req-a50575c7-38b6-4d3d-b061-d545b5aab8da req-925e87e9-db44-4233-a14e-c96afc6acce9 service nova] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Detach interface failed, port_id=ce1440b0-008c-48c7-b1c8-61ab26650d98, reason: Instance c08166c5-2c31-4d40-a61c-c541924eb49c could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 808.865807] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b9584ec-af9f-421d-a667-94dba4230515 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.906888] env[70020]: DEBUG nova.network.neutron [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Updated VIF entry in instance network info cache for port 9cdec97e-ce57-46c5-8d8f-1425a3452a72. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.906888] env[70020]: DEBUG nova.network.neutron [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Updating instance_info_cache with network_info: [{"id": "9cdec97e-ce57-46c5-8d8f-1425a3452a72", "address": "fa:16:3e:0c:a7:52", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.111", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cdec97e-ce", "ovs_interfaceid": "9cdec97e-ce57-46c5-8d8f-1425a3452a72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.933180] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.934028] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.934028] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Deleting the datastore file [datastore1] 36f15b0a-d57f-49d8-9510-1036e889a438 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.934028] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f835a6b0-3f49-42fe-826f-eaeb627d68e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.944112] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for the task: (returnval){ [ 808.944112] env[70020]: value = "task-3618208" [ 808.944112] env[70020]: _type = "Task" [ 808.944112] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.952328] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618208, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.005094] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618204, 'name': CreateVM_Task, 'duration_secs': 0.718409} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.005280] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b99195a6-866e-4142-970a-42a0564889ef] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.006115] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.006294] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.006669] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 809.006925] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f90029ef-36de-4f5e-9d96-5fbf0b723f0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.012037] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 809.012037] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52090aac-8d6b-d471-a34a-36fd7a052047" [ 809.012037] env[70020]: _type = "Task" [ 809.012037] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.019625] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52090aac-8d6b-d471-a34a-36fd7a052047, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.029132] env[70020]: DEBUG nova.network.neutron [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Successfully updated port: 9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 809.069834] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618206, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155045} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.070207] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 809.071058] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6124cdd3-1297-4ef0-862f-cfef758c8dc3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.093400] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 809.096330] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-106faee9-db50-4237-9584-0768ca337f5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.116630] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 809.116630] env[70020]: value = "task-3618209" [ 809.116630] env[70020]: _type = "Task" [ 809.116630] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.127133] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.241169] env[70020]: DEBUG nova.network.neutron [-] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.278542] env[70020]: INFO nova.compute.manager [-] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Took 1.42 seconds to deallocate network for instance. 
[ 809.344671] env[70020]: DEBUG oslo_concurrency.lockutils [None req-df6bd22c-3ce4-4726-aff6-0d4bedde0671 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.094s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.409633] env[70020]: DEBUG oslo_concurrency.lockutils [req-2c95faf9-e758-4720-9d10-db1159bf0ab9 req-26225666-9394-4c74-8994-24926f510085 service nova] Releasing lock "refresh_cache-b99195a6-866e-4142-970a-42a0564889ef" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.455121] env[70020]: DEBUG oslo_vmware.api [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Task: {'id': task-3618208, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143157} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.455775] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 809.455775] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 809.455775] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 809.459224] env[70020]: INFO nova.compute.manager [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Took 2.18 seconds to destroy the instance on the hypervisor. [ 809.459224] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 809.459224] env[70020]: DEBUG nova.compute.manager [-] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 809.459224] env[70020]: DEBUG nova.network.neutron [-] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 809.528019] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52090aac-8d6b-d471-a34a-36fd7a052047, 'name': SearchDatastore_Task, 'duration_secs': 0.009379} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.528019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.528019] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.528019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.528019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.528019] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.528019] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8bb1c37-f90f-4d33-ad10-fc103495b634 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.533294] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "refresh_cache-f56e88f6-3a25-44d9-bdb1-cc4291169c9c" 
{{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.533294] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "refresh_cache-f56e88f6-3a25-44d9-bdb1-cc4291169c9c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.533464] env[70020]: DEBUG nova.network.neutron [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.536239] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.536360] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.537351] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f85888e-feca-4a36-8731-ce985d83e610 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.543065] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 809.543065] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52583559-c375-749f-ac1e-b701bb1d3312" [ 809.543065] env[70020]: _type = "Task" [ 809.543065] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.552028] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52583559-c375-749f-ac1e-b701bb1d3312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.627176] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.656893] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5db076d-db47-4ab1-ac3d-56bf9e4a7725 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.670949] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a14a24-1d27-40b6-acce-f01933dbe73b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.714960] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745e163b-34f1-479c-8d54-4d125870d2b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.725180] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd42c08-3ab6-410f-8502-f3a5d74f123d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.740308] env[70020]: DEBUG nova.compute.provider_tree [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.742361] env[70020]: INFO nova.compute.manager [-] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Took 1.91 seconds to deallocate network for instance. [ 809.844986] env[70020]: INFO nova.compute.manager [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Took 0.57 seconds to detach 1 volumes for instance. [ 810.053865] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52583559-c375-749f-ac1e-b701bb1d3312, 'name': SearchDatastore_Task, 'duration_secs': 0.008549} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.054671] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13310e66-9007-44e6-938b-e67750516a74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.060047] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 810.060047] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c6314a-b3cb-57a0-14f1-e4b3e520460f" [ 810.060047] env[70020]: _type = "Task" [ 810.060047] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.067210] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c6314a-b3cb-57a0-14f1-e4b3e520460f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.097589] env[70020]: DEBUG nova.network.neutron [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.112478] env[70020]: DEBUG nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Received event network-vif-plugged-9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.112478] env[70020]: DEBUG oslo_concurrency.lockutils [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] Acquiring lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.112633] env[70020]: DEBUG oslo_concurrency.lockutils [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.112718] env[70020]: DEBUG oslo_concurrency.lockutils [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.112879] env[70020]: DEBUG nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] No waiting events found dispatching network-vif-plugged-9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 810.113089] env[70020]: WARNING nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Received unexpected event network-vif-plugged-9071978f-4173-4873-86de-85c11de7ddb7 for instance with vm_state building and task_state spawning. 
[ 810.113270] env[70020]: DEBUG nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Received event network-changed-9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.113409] env[70020]: DEBUG nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Refreshing instance network info cache due to event network-changed-9071978f-4173-4873-86de-85c11de7ddb7. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 810.113610] env[70020]: DEBUG oslo_concurrency.lockutils [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] Acquiring lock "refresh_cache-f56e88f6-3a25-44d9-bdb1-cc4291169c9c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.127180] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618209, 'name': ReconfigVM_Task, 'duration_secs': 0.796412} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.127475] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Reconfigured VM instance instance-0000000d to attach disk [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 810.128088] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ae9fdc4-e76f-4f3a-9f59-83614c9aefec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.134719] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 810.134719] env[70020]: value = "task-3618210" [ 810.134719] env[70020]: _type = "Task" [ 810.134719] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.143140] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618210, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.183534] env[70020]: DEBUG nova.objects.instance [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.245852] env[70020]: DEBUG nova.scheduler.client.report [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.253026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.295689] env[70020]: DEBUG nova.network.neutron [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Updating instance_info_cache with network_info: [{"id": "9071978f-4173-4873-86de-85c11de7ddb7", "address": "fa:16:3e:51:0a:a6", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9071978f-41", "ovs_interfaceid": "9071978f-4173-4873-86de-85c11de7ddb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.318812] env[70020]: DEBUG nova.compute.manager [req-46eb471c-4ae3-4357-9485-9403f5f74777 req-039ecd58-d61c-42a1-b88c-4a0d5e93447b service nova] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Received event network-vif-deleted-27c6992d-5e25-418c-83e7-a49ce44dee0e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.344519] env[70020]: DEBUG 
nova.network.neutron [-] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.354949] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.570386] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c6314a-b3cb-57a0-14f1-e4b3e520460f, 'name': SearchDatastore_Task, 'duration_secs': 0.009389} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.570386] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.570552] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b99195a6-866e-4142-970a-42a0564889ef/b99195a6-866e-4142-970a-42a0564889ef.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.570696] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2f50770-f956-4c63-bb8e-8f5837d9b13b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.577452] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 810.577452] env[70020]: value = "task-3618211" [ 810.577452] env[70020]: _type = "Task" [ 810.577452] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.587024] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.653503] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618210, 'name': Rename_Task, 'duration_secs': 0.470022} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.653503] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 810.653503] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54b6e86f-9766-4072-9c52-34df580b8e8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.659246] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 810.659246] env[70020]: value = "task-3618212" [ 810.659246] env[70020]: _type = "Task" [ 810.659246] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.668600] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.689917] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.689917] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.690646] env[70020]: DEBUG nova.network.neutron [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 810.690646] env[70020]: DEBUG nova.objects.instance [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'info_cache' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.758497] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.758497] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 810.761329] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.660s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.767636] env[70020]: INFO nova.compute.claims [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.801250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "refresh_cache-f56e88f6-3a25-44d9-bdb1-cc4291169c9c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.801250] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Instance network_info: |[{"id": "9071978f-4173-4873-86de-85c11de7ddb7", "address": "fa:16:3e:51:0a:a6", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9071978f-41", "ovs_interfaceid": "9071978f-4173-4873-86de-85c11de7ddb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 810.801250] env[70020]: DEBUG oslo_concurrency.lockutils [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] Acquired lock "refresh_cache-f56e88f6-3a25-44d9-bdb1-cc4291169c9c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.801250] env[70020]: DEBUG nova.network.neutron [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Refreshing network info cache for port 9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 810.801250] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:0a:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9071978f-4173-4873-86de-85c11de7ddb7', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 810.813133] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.817246] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 810.819321] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-278f6e26-3847-4a70-aefe-9fcf90783352 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.843410] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 810.843410] env[70020]: value = "task-3618213" [ 810.843410] env[70020]: _type = "Task" [ 810.843410] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.848280] env[70020]: INFO nova.compute.manager [-] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Took 1.39 seconds to deallocate network for instance. [ 810.857835] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618213, 'name': CreateVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.087985] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506149} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.089313] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b99195a6-866e-4142-970a-42a0564889ef/b99195a6-866e-4142-970a-42a0564889ef.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.089313] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.089313] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30a80dc7-5430-4b21-b39f-9eddd15e6632 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.095697] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 811.095697] env[70020]: value = "task-3618214" [ 811.095697] env[70020]: _type = "Task" [ 811.095697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.111159] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618214, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.174621] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618212, 'name': PowerOnVM_Task} progress is 87%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.193898] env[70020]: DEBUG nova.objects.base [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Object Instance<08ce6bc8-30fe-4c63-80e1-26c84ae75702> lazy-loaded attributes: flavor,info_cache {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 811.248923] env[70020]: DEBUG nova.network.neutron [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Updated VIF entry in instance network info cache for port 9071978f-4173-4873-86de-85c11de7ddb7. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.248923] env[70020]: DEBUG nova.network.neutron [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Updating instance_info_cache with network_info: [{"id": "9071978f-4173-4873-86de-85c11de7ddb7", "address": "fa:16:3e:51:0a:a6", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9071978f-41", "ovs_interfaceid": "9071978f-4173-4873-86de-85c11de7ddb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.272957] env[70020]: DEBUG nova.compute.utils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.281187] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 811.281187] env[70020]: DEBUG nova.network.neutron [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.325018] env[70020]: DEBUG nova.policy [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 811.354990] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618213, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.362057] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.610528] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618214, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06504} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.610962] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.611684] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9996087d-06db-4693-9792-c93587632fbc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.637866] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] b99195a6-866e-4142-970a-42a0564889ef/b99195a6-866e-4142-970a-42a0564889ef.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.637866] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0931523-9466-4c4b-9cf0-308e1d8b7540 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.658156] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 811.658156] env[70020]: value = "task-3618215" [ 811.658156] env[70020]: _type = "Task" [ 811.658156] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.669845] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618215, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.673047] env[70020]: DEBUG oslo_vmware.api [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618212, 'name': PowerOnVM_Task, 'duration_secs': 0.923683} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.673321] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 811.673520] env[70020]: DEBUG nova.compute.manager [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 811.674322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f52f936-e99b-4236-96e7-2e5065cf6c7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.753299] env[70020]: DEBUG oslo_concurrency.lockutils [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] Releasing lock "refresh_cache-f56e88f6-3a25-44d9-bdb1-cc4291169c9c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.753614] env[70020]: DEBUG nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Received event network-vif-deleted-d57e8cdf-1b06-49d3-ba61-715ba529bb2e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 811.753794] env[70020]: INFO nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Neutron deleted interface d57e8cdf-1b06-49d3-ba61-715ba529bb2e; detaching it from the instance and deleting it from the info cache [ 811.754061] env[70020]: DEBUG nova.network.neutron [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.784722] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 811.836139] env[70020]: DEBUG nova.network.neutron [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Successfully created port: 40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.864045] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618213, 'name': CreateVM_Task, 'duration_secs': 0.675131} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.864045] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 811.864045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.864045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.864045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 811.864045] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ae3943a-bb12-4138-8430-70757bbb871d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.872156] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 811.872156] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526623e1-e4ce-254a-280f-68ad35244c3c" [ 811.872156] env[70020]: _type = "Task" [ 811.872156] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.882788] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526623e1-e4ce-254a-280f-68ad35244c3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.046709] env[70020]: DEBUG nova.network.neutron [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [{"id": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "address": "fa:16:3e:99:62:fe", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbd6812-93", "ovs_interfaceid": "7cbd6812-9369-466e-a269-def6f4b8ed8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.172788] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618215, 'name': ReconfigVM_Task, 'duration_secs': 0.305361} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.173108] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Reconfigured VM instance instance-00000031 to attach disk [datastore2] b99195a6-866e-4142-970a-42a0564889ef/b99195a6-866e-4142-970a-42a0564889ef.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.173724] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99d2766d-aedf-4710-a2af-e5d62df4bb3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.181859] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 812.181859] env[70020]: value = "task-3618216" [ 812.181859] env[70020]: _type = "Task" [ 812.181859] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.202019] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618216, 'name': Rename_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.202582] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.258089] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd4de71a-ba13-45f3-a96b-d054e3285384 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.267729] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbdc6f1-0a6b-4027-8e90-3618c2dd1640 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.310743] env[70020]: DEBUG nova.compute.manager [req-0aaf0ad0-49f3-4e5e-9a63-08e1575756e3 req-3b77a15d-7a14-435e-a88d-1de46c860dfb service nova] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Detach interface failed, port_id=d57e8cdf-1b06-49d3-ba61-715ba529bb2e, reason: Instance 36f15b0a-d57f-49d8-9510-1036e889a438 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 812.387154] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526623e1-e4ce-254a-280f-68ad35244c3c, 'name': SearchDatastore_Task, 'duration_secs': 0.011961} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.390654] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.390707] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 812.390941] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.391099] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.391273] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.392568] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52c45715-5f40-4b12-9d09-04b325f16aa4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.402417] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.402474] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 812.403221] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36d74313-2a02-42c6-a8bf-183075eca72d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.411188] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 812.411188] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52de11a8-7ed2-b02d-c4e7-210265acd8fc" [ 812.411188] env[70020]: _type = "Task" [ 812.411188] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.418892] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52de11a8-7ed2-b02d-c4e7-210265acd8fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.447510] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07243522-ba72-4391-9088-8dbca7942325 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.455470] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc896c2f-e89e-41c8-acae-602a0f0fed09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.488161] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef53e1b9-8c5c-4707-8acb-bf4bdd74c587 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.500066] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8552214b-7106-45c8-b696-fbbea34bd580 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.513980] env[70020]: DEBUG nova.compute.provider_tree [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.548103] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "refresh_cache-08ce6bc8-30fe-4c63-80e1-26c84ae75702" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.695078] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618216, 'name': Rename_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.813634] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 812.844274] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 812.844821] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None 
req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 812.848082] env[70020]: DEBUG nova.virt.hardware [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 812.848082] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea84fbb9-ea65-4118-ad37-c6cb52c7a8e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.857238] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b9714f-fdc1-46f4-8b58-2c099e5f00e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.921337] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52de11a8-7ed2-b02d-c4e7-210265acd8fc, 'name': SearchDatastore_Task, 'duration_secs': 0.008862} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.922354] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a91469a9-83dc-4e31-8898-2a23bea4a976 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.928457] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 812.928457] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52161d83-c668-0b5f-bd05-7dfac5c502d6" [ 812.928457] env[70020]: _type = "Task" [ 812.928457] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.937783] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52161d83-c668-0b5f-bd05-7dfac5c502d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.017118] env[70020]: DEBUG nova.scheduler.client.report [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.195606] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618216, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.439523] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52161d83-c668-0b5f-bd05-7dfac5c502d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010336} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.439523] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.439712] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] f56e88f6-3a25-44d9-bdb1-cc4291169c9c/f56e88f6-3a25-44d9-bdb1-cc4291169c9c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 813.439859] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b50f761-0670-4a00-ad76-b7772cfcf4d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.446926] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 813.446926] env[70020]: value = "task-3618217" [ 813.446926] env[70020]: _type = "Task" [ 813.446926] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.460849] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.522873] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.523178] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.527152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.190s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.527667] env[70020]: INFO nova.compute.claims [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.554524] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.555172] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8acd1965-35b9-4ada-95d3-430d5dc87065 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.565995] env[70020]: DEBUG oslo_vmware.api [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 813.565995] env[70020]: value = "task-3618218" [ 813.565995] env[70020]: _type = "Task" [ 813.565995] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.574594] env[70020]: DEBUG oslo_vmware.api [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618218, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.575283] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.575375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.575612] env[70020]: INFO nova.compute.manager [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Rebooting instance [ 813.697516] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618216, 'name': Rename_Task, 'duration_secs': 1.158014} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.701026] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.701026] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69328344-6ca8-4f52-bf6a-64d00794dc2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.706800] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 813.706800] env[70020]: value = "task-3618219" [ 813.706800] env[70020]: _type = "Task" [ 813.706800] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.717429] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618219, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.730647] env[70020]: INFO nova.compute.manager [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Rebuilding instance [ 813.769466] env[70020]: DEBUG nova.network.neutron [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Successfully updated port: 40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 813.810964] env[70020]: DEBUG nova.compute.manager [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.812106] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0103e76-4ee5-4e9b-86b2-3dbe8bd8f3e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.953794] env[70020]: DEBUG nova.compute.manager [req-a96d53fa-7f7e-439e-a062-776a018bd663 req-b5f5b3d2-ced3-40be-a217-113fb2ac500d service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Received event network-vif-plugged-40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 813.954023] env[70020]: DEBUG oslo_concurrency.lockutils [req-a96d53fa-7f7e-439e-a062-776a018bd663 req-b5f5b3d2-ced3-40be-a217-113fb2ac500d service nova] Acquiring lock "f7a42358-f26a-4651-a929-d3836f050648-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.954261] env[70020]: DEBUG oslo_concurrency.lockutils [req-a96d53fa-7f7e-439e-a062-776a018bd663 req-b5f5b3d2-ced3-40be-a217-113fb2ac500d service nova] Lock "f7a42358-f26a-4651-a929-d3836f050648-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.954389] env[70020]: DEBUG oslo_concurrency.lockutils [req-a96d53fa-7f7e-439e-a062-776a018bd663 req-b5f5b3d2-ced3-40be-a217-113fb2ac500d service nova] Lock "f7a42358-f26a-4651-a929-d3836f050648-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.954552] env[70020]: DEBUG nova.compute.manager [req-a96d53fa-7f7e-439e-a062-776a018bd663 req-b5f5b3d2-ced3-40be-a217-113fb2ac500d service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] No waiting events found dispatching network-vif-plugged-40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 813.954709] env[70020]: WARNING nova.compute.manager [req-a96d53fa-7f7e-439e-a062-776a018bd663 req-b5f5b3d2-ced3-40be-a217-113fb2ac500d service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Received unexpected event 
network-vif-plugged-40c50349-5efa-4257-b479-b8632c4d6a66 for instance with vm_state building and task_state spawning. [ 813.960709] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618217, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.035928] env[70020]: DEBUG nova.compute.utils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.037774] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.037955] env[70020]: DEBUG nova.network.neutron [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.077838] env[70020]: DEBUG oslo_vmware.api [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618218, 'name': PowerOnVM_Task, 'duration_secs': 0.501379} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.080632] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.080811] env[70020]: DEBUG nova.compute.manager [None req-0ff7a455-c9a7-4027-a1fc-656caa3957fe tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.082848] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243a3506-2030-4ced-b14c-7a2f74b07a4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.103309] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.103574] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.103748] env[70020]: DEBUG nova.network.neutron [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.106402] env[70020]: DEBUG nova.policy [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4413645e57d6483887bd6431f71360eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90641c26c4064f219bf2e52694da4e0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.193959] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.196075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab 
tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.219261] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618219, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.272696] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-f7a42358-f26a-4651-a929-d3836f050648" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.272794] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-f7a42358-f26a-4651-a929-d3836f050648" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.272965] env[70020]: DEBUG nova.network.neutron [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.459498] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618217, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537028} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.459769] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] f56e88f6-3a25-44d9-bdb1-cc4291169c9c/f56e88f6-3a25-44d9-bdb1-cc4291169c9c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 814.460212] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 814.460480] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-766ade1f-2e8c-4c36-aa2a-6d6c335327c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.470127] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 814.470127] env[70020]: value = "task-3618220" [ 814.470127] env[70020]: _type = "Task" [ 814.470127] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.480530] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618220, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.541415] env[70020]: DEBUG nova.compute.utils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.577192] env[70020]: DEBUG nova.network.neutron [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Successfully created port: 320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.721505] env[70020]: DEBUG oslo_vmware.api [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618219, 'name': PowerOnVM_Task, 'duration_secs': 0.636076} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.724175] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.724379] env[70020]: INFO nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Took 9.66 seconds to spawn the instance on the hypervisor. [ 814.724558] env[70020]: DEBUG nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.725572] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf54ebed-f168-46d7-abe7-dda9d8f7bfb9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.817342] env[70020]: DEBUG nova.network.neutron [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.829381] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.829867] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80e2daef-17ed-4606-9d39-3e868a8085d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.841104] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 814.841104] env[70020]: value = "task-3618221" [ 814.841104] env[70020]: _type = "Task" [ 814.841104] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.853062] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618221, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.903483] env[70020]: DEBUG nova.network.neutron [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.981214] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.236051} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.981496] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 814.982238] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570a33a6-dc5b-42b6-a2ac-adee50b1b13b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.005423] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] f56e88f6-3a25-44d9-bdb1-cc4291169c9c/f56e88f6-3a25-44d9-bdb1-cc4291169c9c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 815.006464] env[70020]: DEBUG nova.network.neutron [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Updating instance_info_cache with network_info: [{"id": "40c50349-5efa-4257-b479-b8632c4d6a66", "address": "fa:16:3e:d2:23:a3", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40c50349-5e", "ovs_interfaceid": "40c50349-5efa-4257-b479-b8632c4d6a66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.009842] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4439f56-f247-40ae-b66c-305e9e7a3288 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.032356] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 815.032356] env[70020]: value = "task-3618222" [ 815.032356] env[70020]: _type = "Task" [ 815.032356] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.044286] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.047263] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618222, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.103842] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57707315-a1bc-461b-829b-449620e9e704 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.112026] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1c3af3-0fcb-48eb-8a4f-b2f19f8959ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.143094] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d99abf-0a13-46e4-871d-1c40af1c5356 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.151273] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7126dc3-9af7-4d3e-a37a-55884d48d472 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.166372] env[70020]: DEBUG nova.compute.provider_tree [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.246787] env[70020]: INFO nova.compute.manager [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Took 37.54 seconds to build instance. [ 815.353178] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618221, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.407058] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.526410] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-f7a42358-f26a-4651-a929-d3836f050648" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.528454] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Instance network_info: |[{"id": "40c50349-5efa-4257-b479-b8632c4d6a66", "address": "fa:16:3e:d2:23:a3", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40c50349-5e", "ovs_interfaceid": "40c50349-5efa-4257-b479-b8632c4d6a66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 815.529104] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:23:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40c50349-5efa-4257-b479-b8632c4d6a66', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.540272] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.540837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a42358-f26a-4651-a929-d3836f050648] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.544690] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-254de1ce-8977-40ae-93f7-4d3f143222b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.568898] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618222, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.570222] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.570222] env[70020]: value = "task-3618223" [ 815.570222] env[70020]: _type = "Task" [ 815.570222] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.577983] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618223, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.669315] env[70020]: DEBUG nova.scheduler.client.report [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.748842] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12c37d1e-4a8c-4334-b89e-b54cd80e651a tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "b99195a6-866e-4142-970a-42a0564889ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.420s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.854560] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618221, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.912968] env[70020]: DEBUG nova.compute.manager [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.913867] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532bc892-00cc-4aa6-9b4b-83d8ca587f39 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.027559] env[70020]: DEBUG nova.compute.manager [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Received event network-changed-40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.027559] env[70020]: DEBUG nova.compute.manager [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Refreshing instance network info cache due to event network-changed-40c50349-5efa-4257-b479-b8632c4d6a66. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 816.027559] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] Acquiring lock "refresh_cache-f7a42358-f26a-4651-a929-d3836f050648" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.027559] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] Acquired lock "refresh_cache-f7a42358-f26a-4651-a929-d3836f050648" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.027722] env[70020]: DEBUG nova.network.neutron [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Refreshing network info cache for port 40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.052261] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618222, 'name': ReconfigVM_Task, 'duration_secs': 0.975633} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.052549] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Reconfigured VM instance instance-00000032 to attach disk [datastore2] f56e88f6-3a25-44d9-bdb1-cc4291169c9c/f56e88f6-3a25-44d9-bdb1-cc4291169c9c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.053246] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a743448-62ed-42bd-8e99-a913684c9bba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.059426] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 816.059426] env[70020]: value = "task-3618224" [ 816.059426] env[70020]: _type = "Task" [ 816.059426] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.064999] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.075078] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618224, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.089312] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618223, 'name': CreateVM_Task, 'duration_secs': 0.402616} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.089478] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a42358-f26a-4651-a929-d3836f050648] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 816.090367] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.090367] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.090967] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 816.090967] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11c76a80-d006-42b4-ba5c-84822b7f5033 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.099061] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 816.099061] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527c3c20-67cb-1097-6228-abe57f2d2234" [ 816.099061] env[70020]: _type = "Task" [ 816.099061] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.106963] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T23:01:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1383205440',id=28,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-2016899399',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.107216] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.107373] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.107550] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.107691] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.107834] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.108048] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.108211] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 
tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.108379] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.108536] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.108851] env[70020]: DEBUG nova.virt.hardware [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.109843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151410ef-05b7-48c8-9d93-40cddd6dd1d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.117668] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527c3c20-67cb-1097-6228-abe57f2d2234, 'name': SearchDatastore_Task, 'duration_secs': 0.010662} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.119803] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.120045] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.120287] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.120422] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.120665] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.120874] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6cb09bd-8ebf-40fa-af1b-87de5a6cf087 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.123577] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866f1ed1-a93a-482b-a7a1-b61dd098a549 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.141324] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.141522] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.142301] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06aeb1b9-696c-4bfd-a848-eba505e4cbd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.147239] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 816.147239] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52593cbd-62ee-28d7-d222-4b9e4b7d4cd4" [ 816.147239] env[70020]: _type = "Task" [ 816.147239] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.154779] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52593cbd-62ee-28d7-d222-4b9e4b7d4cd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.176892] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.177437] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.183428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.756s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.183428] env[70020]: INFO nova.compute.claims [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.254050] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 816.356031] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618221, 'name': PowerOffVM_Task, 'duration_secs': 1.04328} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.356031] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 816.356031] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 816.356031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a111ee-dcf6-439e-9bb4-b209dd6c4539 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.362339] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 816.362580] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01ba9d45-b6c2-4377-b2ee-418936e56d52 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.426434] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 816.426641] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 816.426835] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleting the datastore file [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.427555] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6b45f9f-2b8d-46bb-a034-595b9fc4b4bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.434653] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb 
tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 816.434653] env[70020]: value = "task-3618226" [ 816.434653] env[70020]: _type = "Task" [ 816.434653] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.442467] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618226, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.504538] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "b99195a6-866e-4142-970a-42a0564889ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.504805] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "b99195a6-866e-4142-970a-42a0564889ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.505026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "b99195a6-866e-4142-970a-42a0564889ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.505227] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "b99195a6-866e-4142-970a-42a0564889ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.505406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "b99195a6-866e-4142-970a-42a0564889ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.507662] env[70020]: INFO nova.compute.manager [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Terminating instance [ 816.547371] env[70020]: DEBUG nova.network.neutron [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] 
Successfully updated port: 320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.572959] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618224, 'name': Rename_Task, 'duration_secs': 0.134729} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.572959] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 816.573204] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31c2ce86-756c-4c78-8331-a39d8ab32cb4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.579493] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 816.579493] env[70020]: value = "task-3618227" [ 816.579493] env[70020]: _type = "Task" [ 816.579493] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.587205] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618227, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.657571] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52593cbd-62ee-28d7-d222-4b9e4b7d4cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.010969} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.658426] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-011e1414-3df0-47c8-9e06-f032334641f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.663814] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 816.663814] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f1f09e-5c95-80e2-3474-b66d08a4f3c0" [ 816.663814] env[70020]: _type = "Task" [ 816.663814] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.675541] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f1f09e-5c95-80e2-3474-b66d08a4f3c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.686134] env[70020]: DEBUG nova.compute.utils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.689515] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 816.689705] env[70020]: DEBUG nova.network.neutron [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.740404] env[70020]: DEBUG nova.network.neutron [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Updated VIF entry in instance network info cache for port 40c50349-5efa-4257-b479-b8632c4d6a66. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 816.740761] env[70020]: DEBUG nova.network.neutron [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Updating instance_info_cache with network_info: [{"id": "40c50349-5efa-4257-b479-b8632c4d6a66", "address": "fa:16:3e:d2:23:a3", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40c50349-5e", "ovs_interfaceid": "40c50349-5efa-4257-b479-b8632c4d6a66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.761433] env[70020]: DEBUG nova.policy [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b511ec320fdc4dacab9e6f66a50f625c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'affdcbe1612b434697a53a8692ef77a4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 816.778703] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.929813] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815b21fd-b72a-4308-9ad5-4ef41a9e9da4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.942172] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Doing hard reboot of VM {{(pid=70020) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 816.943109] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0ffe7fc7-f802-41c1-87cd-f2803a0089d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.947716] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189687} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.948350] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.948519] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 816.948688] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 816.953362] env[70020]: DEBUG oslo_vmware.api [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 816.953362] env[70020]: value = "task-3618228" [ 816.953362] env[70020]: _type = "Task" [ 816.953362] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.962505] env[70020]: DEBUG oslo_vmware.api [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618228, 'name': ResetVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.011633] env[70020]: DEBUG nova.compute.manager [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.012032] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.013108] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f44976-75e0-4857-8b74-5d53bd350608 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.020732] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.021012] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecf99725-4db7-4de0-9453-0445184470cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.028492] env[70020]: DEBUG oslo_vmware.api [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 817.028492] env[70020]: value = "task-3618229" [ 817.028492] env[70020]: _type = "Task" [ 817.028492] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.037960] env[70020]: DEBUG oslo_vmware.api [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618229, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.051962] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.052205] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.052397] env[70020]: DEBUG nova.network.neutron [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.088928] env[70020]: DEBUG oslo_vmware.api [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618227, 'name': PowerOnVM_Task, 'duration_secs': 0.508572} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.089196] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 817.089444] env[70020]: INFO nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Took 9.25 seconds to spawn the instance on the hypervisor. [ 817.089670] env[70020]: DEBUG nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 817.091124] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ed4615-3313-4a78-ac34-f46afd5dc217 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.175203] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f1f09e-5c95-80e2-3474-b66d08a4f3c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011183} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.175477] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.175733] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f7a42358-f26a-4651-a929-d3836f050648/f7a42358-f26a-4651-a929-d3836f050648.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.175988] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-292d6b59-c3e7-4423-8dd5-f0ab343c3cef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.182379] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 817.182379] env[70020]: value = "task-3618230" [ 817.182379] env[70020]: _type = "Task" [ 817.182379] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.192285] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.199677] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.246573] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4830cfa-d7bd-4f96-9abe-bd6b41465cf9 req-0912b9bc-140f-4e6f-8de5-f6df350ce911 service nova] Releasing lock "refresh_cache-f7a42358-f26a-4651-a929-d3836f050648" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.334634] env[70020]: DEBUG nova.network.neutron [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Successfully created port: f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.465355] env[70020]: DEBUG oslo_vmware.api [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618228, 'name': ResetVM_Task, 'duration_secs': 0.103028} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.465638] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Did hard reboot of VM {{(pid=70020) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 817.465829] env[70020]: DEBUG nova.compute.manager [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 817.466598] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cacff6-43bc-46f5-b046-46d7bace52fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.542093] env[70020]: DEBUG oslo_vmware.api [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618229, 'name': PowerOffVM_Task, 'duration_secs': 0.292022} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.542093] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 817.542093] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 817.542093] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da86fa41-a2e8-4ef2-925b-83ed683154f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.595595] env[70020]: DEBUG nova.network.neutron [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.609876] env[70020]: INFO nova.compute.manager [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Took 34.16 seconds to build instance. 
[ 817.613653] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 817.613653] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 817.613653] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Deleting the datastore file [datastore2] b99195a6-866e-4142-970a-42a0564889ef {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 817.613653] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff25bc8d-cb5e-4b3f-b684-346bf30c6e60 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.620484] env[70020]: DEBUG oslo_vmware.api [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for the task: (returnval){ [ 817.620484] env[70020]: value = "task-3618232" [ 817.620484] env[70020]: _type = "Task" [ 817.620484] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.633847] env[70020]: DEBUG oslo_vmware.api [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.710914] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618230, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.804849] env[70020]: DEBUG nova.network.neutron [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Updating instance_info_cache with network_info: [{"id": "320d056b-ab7e-455d-a9dc-f443a22fc563", "address": "fa:16:3e:77:e3:59", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320d056b-ab", "ovs_interfaceid": "320d056b-ab7e-455d-a9dc-f443a22fc563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.817867] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e24b1c-e47b-4fc9-8534-1157064508bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.827202] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36a0144-b972-4cb9-9ac8-5a2eeda688af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.861054] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b99611b-d4fb-4ec4-9df6-e97aef97bcc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.868676] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1b9498-d08c-4083-be85-8c6e3f4a015d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.884344] env[70020]: DEBUG nova.compute.provider_tree [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.986295] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f96e663-b33c-4a9a-9d88-121b1c4413f6 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.411s {{(pid=70020) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.991883] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 817.992237] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.992475] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 817.993472] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.993472] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 817.993472] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 817.993472] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 817.993472] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 817.993472] env[70020]: DEBUG nova.virt.hardware [None 
req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 817.993791] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 817.993791] env[70020]: DEBUG nova.virt.hardware [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 817.994636] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53881780-9c32-4959-96c4-2989e846d898 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.002630] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a65787d-47ba-46c2-9744-c0996b74d2a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.016900] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:ce:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6348da2f-b0bd-499f-bf5e-b14a38d29438', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.024221] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.024559] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.024978] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf7ea0b5-a4af-4d2a-91ff-4e4cc8a17354 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.044612] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.044612] env[70020]: value = "task-3618233" [ 818.044612] env[70020]: _type = "Task" [ 818.044612] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.053030] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618233, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.055158] env[70020]: DEBUG nova.compute.manager [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Received event network-vif-plugged-320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.055324] env[70020]: DEBUG oslo_concurrency.lockutils [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] Acquiring lock "f16d60a4-5f80-4f41-b994-068de48775ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.055800] env[70020]: DEBUG oslo_concurrency.lockutils [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] Lock "f16d60a4-5f80-4f41-b994-068de48775ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.055800] env[70020]: DEBUG oslo_concurrency.lockutils [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] Lock "f16d60a4-5f80-4f41-b994-068de48775ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.056147] env[70020]: DEBUG nova.compute.manager [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] No waiting events found dispatching network-vif-plugged-320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.056147] env[70020]: WARNING nova.compute.manager [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Received unexpected event network-vif-plugged-320d056b-ab7e-455d-a9dc-f443a22fc563 for instance with vm_state building and task_state spawning. [ 818.056254] env[70020]: DEBUG nova.compute.manager [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Received event network-changed-320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.056296] env[70020]: DEBUG nova.compute.manager [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Refreshing instance network info cache due to event network-changed-320d056b-ab7e-455d-a9dc-f443a22fc563. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 818.056486] env[70020]: DEBUG oslo_concurrency.lockutils [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] Acquiring lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.114546] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5d8f147-0bd3-46b6-bc13-24ad436d43c7 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.598s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.131911] env[70020]: DEBUG oslo_vmware.api [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Task: {'id': task-3618232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279939} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.132214] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 818.132428] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 818.133016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 818.133016] env[70020]: INFO nova.compute.manager [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] [instance: b99195a6-866e-4142-970a-42a0564889ef] Took 1.12 seconds to destroy the instance on the hypervisor. [ 818.133136] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.133259] env[70020]: DEBUG nova.compute.manager [-] [instance: b99195a6-866e-4142-970a-42a0564889ef] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 818.133366] env[70020]: DEBUG nova.network.neutron [-] [instance: b99195a6-866e-4142-970a-42a0564889ef] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.197547] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642738} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.198123] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f7a42358-f26a-4651-a929-d3836f050648/f7a42358-f26a-4651-a929-d3836f050648.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.198656] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.199038] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21872fb8-cfac-48e6-9c5e-ddf87d196235 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.206573] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 818.206573] env[70020]: value = "task-3618234" [ 818.206573] env[70020]: _type = "Task" [ 818.206573] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.212696] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.220250] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618234, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.237039] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.237274] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.237429] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.237596] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.237732] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.237872] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.238083] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.238434] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.238621] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.238777] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.238941] env[70020]: DEBUG nova.virt.hardware [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.239790] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e56dd27-01bd-4674-af53-9730972f5a3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.248031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd1d535-8f8d-4944-888d-3ca9cb4a001d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.307462] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Releasing lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.307820] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Instance network_info: |[{"id": "320d056b-ab7e-455d-a9dc-f443a22fc563", "address": "fa:16:3e:77:e3:59", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320d056b-ab", "ovs_interfaceid": "320d056b-ab7e-455d-a9dc-f443a22fc563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.308147] env[70020]: DEBUG oslo_concurrency.lockutils [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] Acquired lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.308340] env[70020]: DEBUG nova.network.neutron [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Refreshing network info cache for port 320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.309819] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:e3:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '320d056b-ab7e-455d-a9dc-f443a22fc563', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.317824] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.320911] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.321655] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-872682b1-a8d5-4136-a5a3-71b9fe928a28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.341635] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.341635] env[70020]: value = "task-3618235" [ 818.341635] env[70020]: _type = "Task" [ 818.341635] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.349755] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618235, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.386838] env[70020]: DEBUG nova.scheduler.client.report [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.554795] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618233, 'name': CreateVM_Task, 'duration_secs': 0.36495} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.555332] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.556014] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.556282] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.556560] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 818.556816] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f383cf37-958a-408c-a8f3-b789399f0c99 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.561861] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 818.561861] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520977ae-ba85-333d-df1d-3ccb2c5e132f" [ 818.561861] env[70020]: _type = "Task" [ 818.561861] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.569967] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520977ae-ba85-333d-df1d-3ccb2c5e132f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.572337] env[70020]: DEBUG nova.network.neutron [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Updated VIF entry in instance network info cache for port 320d056b-ab7e-455d-a9dc-f443a22fc563. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 818.572688] env[70020]: DEBUG nova.network.neutron [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Updating instance_info_cache with network_info: [{"id": "320d056b-ab7e-455d-a9dc-f443a22fc563", "address": "fa:16:3e:77:e3:59", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320d056b-ab", "ovs_interfaceid": "320d056b-ab7e-455d-a9dc-f443a22fc563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.592349] env[70020]: DEBUG nova.compute.manager [req-36e60896-f745-4b96-a4b3-39507d0e67fd req-57ef2fe8-5dec-4827-b144-dac197d04b5e service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Received event network-vif-deleted-9cdec97e-ce57-46c5-8d8f-1425a3452a72 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.592579] env[70020]: INFO nova.compute.manager [req-36e60896-f745-4b96-a4b3-39507d0e67fd req-57ef2fe8-5dec-4827-b144-dac197d04b5e service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Neutron deleted interface 9cdec97e-ce57-46c5-8d8f-1425a3452a72; detaching it from the instance and deleting it from the info cache [ 818.592798] env[70020]: DEBUG nova.network.neutron [req-36e60896-f745-4b96-a4b3-39507d0e67fd req-57ef2fe8-5dec-4827-b144-dac197d04b5e service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.617068] env[70020]: DEBUG nova.compute.manager [None 
req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 818.720080] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618234, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088173} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.720345] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.721139] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b07470-b167-4b9c-a2a4-eadbcd76c747 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.746220] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] f7a42358-f26a-4651-a929-d3836f050648/f7a42358-f26a-4651-a929-d3836f050648.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.746220] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1e5b482-9473-4151-9ee2-6057a4fa5fdf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.765911] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 818.765911] env[70020]: value = "task-3618236" [ 818.765911] env[70020]: _type = "Task" [ 818.765911] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.774459] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618236, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.855235] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618235, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.861247] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.861247] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.894033] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.894033] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.895772] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.523s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.896249] env[70020]: DEBUG nova.objects.instance [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lazy-loading 'resources' on Instance uuid 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.917906] env[70020]: DEBUG nova.network.neutron [-] [instance: b99195a6-866e-4142-970a-42a0564889ef] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.073131] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520977ae-ba85-333d-df1d-3ccb2c5e132f, 'name': SearchDatastore_Task, 'duration_secs': 0.009963} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.073628] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.074043] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.074443] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.074752] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.075114] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.075732] env[70020]: DEBUG oslo_concurrency.lockutils [req-4353f6dd-f4fe-4cf3-b75e-de6a1a546c09 req-127e8bcc-73d2-4c21-8890-182fa40a09ad service nova] Releasing lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.076220] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fec796ab-c1d3-4e2a-8553-f247a5d57949 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.084845] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.085420] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.086307] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10124ec7-ebac-4f75-bcde-b7f29b67e07d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.095021] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 819.095021] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522c5177-7b08-2fed-da40-eb5487fabbc8" [ 819.095021] env[70020]: _type = "Task" [ 819.095021] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.097435] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d23c282c-0895-4a17-934c-27f3515189cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.105944] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522c5177-7b08-2fed-da40-eb5487fabbc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.114029] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1299542c-9d19-40e4-a4e9-83b14c8fd17e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.158982] env[70020]: DEBUG nova.compute.manager [req-36e60896-f745-4b96-a4b3-39507d0e67fd req-57ef2fe8-5dec-4827-b144-dac197d04b5e service nova] [instance: b99195a6-866e-4142-970a-42a0564889ef] Detach interface failed, port_id=9cdec97e-ce57-46c5-8d8f-1425a3452a72, reason: Instance b99195a6-866e-4142-970a-42a0564889ef could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 819.160018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.276941] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618236, 'name': ReconfigVM_Task, 'duration_secs': 0.292235} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.277334] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Reconfigured VM instance instance-00000033 to attach disk [datastore1] f7a42358-f26a-4651-a929-d3836f050648/f7a42358-f26a-4651-a929-d3836f050648.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.277979] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c23edf82-152c-44d3-997c-ba48b862488e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.286019] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 819.286019] env[70020]: value = "task-3618237" [ 819.286019] env[70020]: _type = "Task" [ 819.286019] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.290060] env[70020]: DEBUG nova.network.neutron [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Successfully updated port: f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.296030] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618237, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.351466] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618235, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.369060] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.369343] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.369628] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.369841] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.370237] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.370639] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.370639] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 819.370703] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.399435] env[70020]: DEBUG nova.compute.utils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.404726] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.404726] env[70020]: DEBUG nova.network.neutron [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.418283] env[70020]: INFO nova.compute.manager [-] [instance: b99195a6-866e-4142-970a-42a0564889ef] Took 1.28 seconds to deallocate network for instance. [ 819.462982] env[70020]: DEBUG nova.policy [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2a633598fe84159b970241c87588a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3b44270b4b74bdba8befc7bc5f55e52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.604364] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522c5177-7b08-2fed-da40-eb5487fabbc8, 'name': SearchDatastore_Task, 'duration_secs': 0.015495} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.605220] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11d37c43-ace6-4e25-9d51-c3ecddbaa83b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.610129] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 819.610129] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b7322b-41c3-de5f-bcdb-90692ff746b4" [ 819.610129] env[70020]: _type = "Task" [ 819.610129] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.619842] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7322b-41c3-de5f-bcdb-90692ff746b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.661112] env[70020]: DEBUG nova.compute.manager [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.662141] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051a003b-87bf-4728-84e1-eccb90071fa8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.792673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "refresh_cache-29d41731-4ae2-4cc4-bfda-b7356922c8ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.792837] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "refresh_cache-29d41731-4ae2-4cc4-bfda-b7356922c8ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.794605] env[70020]: DEBUG nova.network.neutron [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.799171] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618237, 'name': Rename_Task, 'duration_secs': 0.136841} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.799171] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 819.799492] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-522fe8fd-04ce-46f0-9ccd-0ea3f0ada66d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.806894] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 819.806894] env[70020]: value = "task-3618238" [ 819.806894] env[70020]: _type = "Task" [ 819.806894] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.819485] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618238, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.853108] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618235, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.875950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.877296] env[70020]: DEBUG nova.network.neutron [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Successfully created port: 43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.905586] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.914541] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbb592b-4e75-4f9d-b361-37f8720f636b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.923075] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5a307a-4359-4d50-875b-759df48296bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.928276] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.961826] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59112200-4f64-4783-8f7a-7fd7f0a0bc97 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.970722] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ab72a2-f2fb-4f31-95c3-807f0924310a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.985728] env[70020]: DEBUG nova.compute.provider_tree [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Inventory has not changed in ProviderTree for provider: 
ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.124538] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7322b-41c3-de5f-bcdb-90692ff746b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009057} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.125268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.125543] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.125922] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53cf690d-c4e2-4060-bf37-c171ba566140 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.132408] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 820.132408] env[70020]: value = "task-3618239" [ 820.132408] env[70020]: _type = "Task" [ 820.132408] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.140465] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618239, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.150544] env[70020]: DEBUG nova.compute.manager [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Received event network-vif-plugged-f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.150795] env[70020]: DEBUG oslo_concurrency.lockutils [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] Acquiring lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.151037] env[70020]: DEBUG oslo_concurrency.lockutils [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.151287] env[70020]: DEBUG oslo_concurrency.lockutils [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.151487] env[70020]: DEBUG nova.compute.manager [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] No waiting events found dispatching network-vif-plugged-f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.151677] env[70020]: WARNING nova.compute.manager [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Received unexpected event network-vif-plugged-f1554235-9a1a-4e1a-9f15-a47bfe87eddd for instance with vm_state building and task_state spawning. [ 820.151836] env[70020]: DEBUG nova.compute.manager [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Received event network-changed-f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.152161] env[70020]: DEBUG nova.compute.manager [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Refreshing instance network info cache due to event network-changed-f1554235-9a1a-4e1a-9f15-a47bfe87eddd. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 820.152258] env[70020]: DEBUG oslo_concurrency.lockutils [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] Acquiring lock "refresh_cache-29d41731-4ae2-4cc4-bfda-b7356922c8ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.175052] env[70020]: INFO nova.compute.manager [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] instance snapshotting [ 820.178343] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80489d67-fc91-46ce-8bc1-934c1f803ef6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.198755] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c24f08-522d-4f65-b416-0ff57a1f1808 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.318831] env[70020]: DEBUG oslo_vmware.api [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618238, 'name': PowerOnVM_Task, 'duration_secs': 0.457252} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.319146] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 820.319364] env[70020]: INFO nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Took 7.50 seconds to spawn the instance on the hypervisor. [ 820.319669] env[70020]: DEBUG nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 820.320829] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcd63cf-6499-4c56-8b71-a0ad3a119b58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.333976] env[70020]: DEBUG nova.network.neutron [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.353194] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618235, 'name': CreateVM_Task, 'duration_secs': 1.673446} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.355483] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.356229] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.356424] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.356737] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 820.357310] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbae1b3d-ac8f-4821-871f-640a01881c08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.361826] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 820.361826] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524c64cc-a727-5030-1cea-dff0a1e989b6" [ 820.361826] env[70020]: _type = "Task" [ 820.361826] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.370724] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524c64cc-a727-5030-1cea-dff0a1e989b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.472353] env[70020]: DEBUG nova.network.neutron [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Updating instance_info_cache with network_info: [{"id": "f1554235-9a1a-4e1a-9f15-a47bfe87eddd", "address": "fa:16:3e:96:52:50", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1554235-9a", "ovs_interfaceid": "f1554235-9a1a-4e1a-9f15-a47bfe87eddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.489657] env[70020]: DEBUG nova.scheduler.client.report [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.646320] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618239, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.710501] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 820.710864] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f1d0f981-f204-4464-8749-68766bd69da6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.721492] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 820.721492] env[70020]: value = "task-3618240" [ 820.721492] env[70020]: _type = "Task" [ 820.721492] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.730029] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618240, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.840038] env[70020]: INFO nova.compute.manager [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Took 29.32 seconds to build instance. [ 820.875842] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524c64cc-a727-5030-1cea-dff0a1e989b6, 'name': SearchDatastore_Task, 'duration_secs': 0.01282} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.876168] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.876487] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.876736] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.876882] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.877080] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.877682] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95a9c48d-756a-4329-a204-8cf1b95fe773 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.890075] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.890075] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.890792] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-593eb62c-8c05-483b-8f3f-c18e3a6dc35e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.898823] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 820.898823] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52afcf87-2a43-e088-bf78-36e672b89197" [ 820.898823] env[70020]: _type = "Task" [ 820.898823] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.908996] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52afcf87-2a43-e088-bf78-36e672b89197, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.919429] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware 
[None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.949365] env[70020]: DEBUG nova.virt.hardware [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.950702] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a6c42e-c5b7-44b5-8abf-506fbdce958b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.958322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2388737-5428-4082-8769-53194ca9e745 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.975158] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] 
Releasing lock "refresh_cache-29d41731-4ae2-4cc4-bfda-b7356922c8ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.975473] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Instance network_info: |[{"id": "f1554235-9a1a-4e1a-9f15-a47bfe87eddd", "address": "fa:16:3e:96:52:50", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1554235-9a", "ovs_interfaceid": "f1554235-9a1a-4e1a-9f15-a47bfe87eddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.975750] env[70020]: DEBUG oslo_concurrency.lockutils [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] Acquired lock "refresh_cache-29d41731-4ae2-4cc4-bfda-b7356922c8ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.975928] env[70020]: DEBUG nova.network.neutron [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Refreshing network info cache for port f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.977116] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:52:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1554235-9a1a-4e1a-9f15-a47bfe87eddd', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.984392] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.985402] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.985619] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3496d315-6269-4359-b716-9a61a97241b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.000282] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.002355] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.436s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.002567] env[70020]: DEBUG nova.objects.instance [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lazy-loading 'resources' on Instance uuid 0add6226-3b90-4991-8f2b-81c35e72a7df {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.009808] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.009808] env[70020]: value = "task-3618241" [ 821.009808] env[70020]: _type = "Task" [ 821.009808] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.021012] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618241, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.029825] env[70020]: INFO nova.scheduler.client.report [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Deleted allocations for instance 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3 [ 821.145163] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.809055} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.145438] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 821.145647] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.145901] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb1d7baa-9f22-422d-9ed7-e78c08ef4365 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.152865] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 821.152865] env[70020]: value = "task-3618242" [ 821.152865] env[70020]: _type = "Task" [ 821.152865] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.167403] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.233282] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618240, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.343644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9ee25f95-b132-43c8-bccd-7aca01458834 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "f7a42358-f26a-4651-a929-d3836f050648" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.918s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.414022] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52afcf87-2a43-e088-bf78-36e672b89197, 'name': SearchDatastore_Task, 'duration_secs': 0.019878} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.414022] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca528148-bf90-4b7b-8395-07a1809c1896 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.419036] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 821.419036] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52140030-90ce-6df0-a3cf-96337723d819" [ 821.419036] env[70020]: _type = "Task" [ 821.419036] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.430269] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52140030-90ce-6df0-a3cf-96337723d819, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.521480] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618241, 'name': CreateVM_Task, 'duration_secs': 0.407628} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.521656] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.522464] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.522641] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.522969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.523253] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf39a974-66c9-4da9-8dd2-b16222a10b90 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.530012] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 
tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 821.530012] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521db53a-4ba1-f610-b830-0a252c852e53" [ 821.530012] env[70020]: _type = "Task" [ 821.530012] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.543852] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ef4b391-5fed-4275-80cb-ac68c9126c66 tempest-ServersTestJSON-2095302437 tempest-ServersTestJSON-2095302437-project-member] Lock "8f7e4e69-0796-469f-8a2b-4e19fbf15ed3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.516s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.552034] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521db53a-4ba1-f610-b830-0a252c852e53, 'name': SearchDatastore_Task, 'duration_secs': 0.01536} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.552034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.552034] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.552034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.552034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.552034] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.552034] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-228a0d6c-3995-49aa-ae15-36b1dc1c7f13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.566806] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.566806] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.566806] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96aeabd5-8b76-4508-b429-db677ef28ddb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.570592] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 821.570592] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527af4ee-aee3-afe5-daf5-319a43794c41" [ 821.570592] env[70020]: _type = "Task" [ 821.570592] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.580648] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527af4ee-aee3-afe5-daf5-319a43794c41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.662916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "f7a42358-f26a-4651-a929-d3836f050648" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.663263] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "f7a42358-f26a-4651-a929-d3836f050648" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.663416] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "f7a42358-f26a-4651-a929-d3836f050648-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.663618] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "f7a42358-f26a-4651-a929-d3836f050648-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.663838] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "f7a42358-f26a-4651-a929-d3836f050648-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.666192] env[70020]: INFO nova.compute.manager [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Terminating instance [ 821.669689] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070627} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.672864] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.674075] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3ab239-e943-490c-9a06-cfc4dabf3639 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.708206] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.711739] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c28a15c-b5f6-47a4-8120-860d72701e09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.740623] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618240, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.744054] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 821.744054] env[70020]: value = "task-3618243" [ 821.744054] env[70020]: _type = "Task" [ 821.744054] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.752493] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618243, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.767905] env[70020]: DEBUG nova.network.neutron [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Updated VIF entry in instance network info cache for port f1554235-9a1a-4e1a-9f15-a47bfe87eddd. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.768294] env[70020]: DEBUG nova.network.neutron [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Updating instance_info_cache with network_info: [{"id": "f1554235-9a1a-4e1a-9f15-a47bfe87eddd", "address": "fa:16:3e:96:52:50", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1554235-9a", "ovs_interfaceid": "f1554235-9a1a-4e1a-9f15-a47bfe87eddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.814969] env[70020]: DEBUG nova.network.neutron [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Successfully updated port: 43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.846158] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 821.929202] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52140030-90ce-6df0-a3cf-96337723d819, 'name': SearchDatastore_Task, 'duration_secs': 0.01429} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.931692] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.931927] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad/f16d60a4-5f80-4f41-b994-068de48775ad.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.932387] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15812581-6b61-4f61-b555-88b18fbd8f3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.938360] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 821.938360] env[70020]: value = "task-3618244" [ 821.938360] env[70020]: _type = "Task" [ 821.938360] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.949453] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.081541] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527af4ee-aee3-afe5-daf5-319a43794c41, 'name': SearchDatastore_Task, 'duration_secs': 0.036199} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.084861] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5295d6-fcd3-418e-bbe6-01fd86b03a28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.091414] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 822.091414] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a99746-20ef-403f-c0f9-86b8d0589479" [ 822.091414] env[70020]: _type = "Task" [ 822.091414] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.100407] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a99746-20ef-403f-c0f9-86b8d0589479, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.116711] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d762359b-1e2e-4925-b373-27afbac2ab08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.124292] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d2bcfb-796b-4f60-8de6-f6744e84e454 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.161675] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbba7654-4204-4816-ab84-73dc028a131f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.170922] env[70020]: DEBUG nova.compute.manager [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 822.171194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.172739] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf22b50-dbd5-401a-98cf-969c3ee11519 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.176754] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72d08f0-7c47-44fa-a8d8-cdca994d5d85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.183829] env[70020]: DEBUG nova.compute.manager [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Received event network-vif-plugged-43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.184058] env[70020]: DEBUG oslo_concurrency.lockutils [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] Acquiring lock "38839949-c717-4f0b-97a7-108d87417b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.184282] env[70020]: DEBUG oslo_concurrency.lockutils [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] Lock 
"38839949-c717-4f0b-97a7-108d87417b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.184491] env[70020]: DEBUG oslo_concurrency.lockutils [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] Lock "38839949-c717-4f0b-97a7-108d87417b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.184900] env[70020]: DEBUG nova.compute.manager [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] No waiting events found dispatching network-vif-plugged-43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.184900] env[70020]: WARNING nova.compute.manager [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Received unexpected event network-vif-plugged-43c0cb8b-c829-4fa8-908c-527551c10fb9 for instance with vm_state building and task_state spawning. [ 822.185040] env[70020]: DEBUG nova.compute.manager [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Received event network-changed-43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.185214] env[70020]: DEBUG nova.compute.manager [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Refreshing instance network info cache due to event network-changed-43c0cb8b-c829-4fa8-908c-527551c10fb9. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 822.185272] env[70020]: DEBUG oslo_concurrency.lockutils [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] Acquiring lock "refresh_cache-38839949-c717-4f0b-97a7-108d87417b88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.185406] env[70020]: DEBUG oslo_concurrency.lockutils [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] Acquired lock "refresh_cache-38839949-c717-4f0b-97a7-108d87417b88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.185576] env[70020]: DEBUG nova.network.neutron [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Refreshing network info cache for port 43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.198293] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.200483] env[70020]: DEBUG nova.compute.provider_tree [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.201139] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bd825ce-43b0-42b7-94e2-a2a14001f6ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.210062] env[70020]: DEBUG oslo_vmware.api [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 822.210062] env[70020]: value = "task-3618245" [ 822.210062] env[70020]: _type = "Task" [ 822.210062] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.221576] env[70020]: DEBUG oslo_vmware.api [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618245, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.245018] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618240, 'name': CreateSnapshot_Task, 'duration_secs': 1.45061} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.245018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 822.245395] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2babb4d8-e36f-42ed-9c4c-cb3622ea4d68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.263228] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618243, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.271799] env[70020]: DEBUG oslo_concurrency.lockutils [req-eb7de462-e5a7-453e-bb9c-be2cb9f413a3 req-416aba0c-db1e-4e8a-87f0-ee0539a3c7dc service nova] Releasing lock "refresh_cache-29d41731-4ae2-4cc4-bfda-b7356922c8ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.317820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "refresh_cache-38839949-c717-4f0b-97a7-108d87417b88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.373266] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.448628] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618244, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507603} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.450659] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad/f16d60a4-5f80-4f41-b994-068de48775ad.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.450659] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.450659] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-faf27cf0-1439-4d1c-8894-57551117a44d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.456572] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 822.456572] env[70020]: value = "task-3618246" [ 822.456572] env[70020]: _type = "Task" [ 822.456572] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.465376] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618246, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.606125] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a99746-20ef-403f-c0f9-86b8d0589479, 'name': SearchDatastore_Task, 'duration_secs': 0.010348} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.607381] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.607381] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 29d41731-4ae2-4cc4-bfda-b7356922c8ff/29d41731-4ae2-4cc4-bfda-b7356922c8ff.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.607504] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5190d9ea-90fa-4c2d-b2ca-7a8269e89ed8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.617772] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 822.617772] env[70020]: value = "task-3618247" [ 822.617772] env[70020]: _type = "Task" [ 822.617772] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.627708] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.704687] env[70020]: DEBUG nova.scheduler.client.report [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.724380] env[70020]: DEBUG oslo_vmware.api [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618245, 'name': PowerOffVM_Task, 'duration_secs': 0.454278} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.724380] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 822.724380] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 822.724579] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7636e268-1d11-45f9-af62-c12f5f882e91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.755039] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618243, 'name': ReconfigVM_Task, 'duration_secs': 0.715562} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.755039] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Reconfigured VM instance instance-0000000d to attach disk [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621/ea97f6ab-057e-44d3-835a-68b46d241621.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.755670] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb8e0208-31ce-4e8e-8025-433093713b32 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.761391] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 822.761391] env[70020]: value = "task-3618249" [ 822.761391] env[70020]: _type = "Task" [ 822.761391] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.771366] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 822.772351] env[70020]: DEBUG nova.network.neutron [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.774683] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0d80952b-a32a-4925-9469-e16dd1e3b113 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.786316] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618249, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.788768] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 822.789116] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 822.789339] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore1] f7a42358-f26a-4651-a929-d3836f050648 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 822.791058] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 822.791058] env[70020]: value = "task-3618250" [ 822.791058] env[70020]: _type = "Task" [ 822.791058] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.791058] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8999add0-6aee-41ff-a60b-dcc0ffce40ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.808518] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618250, 'name': CloneVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.810390] env[70020]: DEBUG oslo_vmware.api [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 822.810390] env[70020]: value = "task-3618251" [ 822.810390] env[70020]: _type = "Task" [ 822.810390] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.819244] env[70020]: DEBUG oslo_vmware.api [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618251, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.857778] env[70020]: DEBUG nova.network.neutron [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.971868] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.221891} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.975547] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.976751] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fe7e9e-4328-4780-a66a-aab0000a5c05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.004805] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad/f16d60a4-5f80-4f41-b994-068de48775ad.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.005384] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ccc9e5e-7c03-4318-9be5-ceca656a7e9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.027048] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 823.027048] env[70020]: value = "task-3618252" [ 823.027048] env[70020]: _type = "Task" [ 823.027048] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.037364] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618252, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.129824] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618247, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.217259] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.215s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.220943] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.092s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.222812] env[70020]: INFO nova.compute.claims [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.249695] env[70020]: INFO nova.scheduler.client.report [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Deleted allocations for instance 0add6226-3b90-4991-8f2b-81c35e72a7df [ 823.273722] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618249, 'name': Rename_Task, 'duration_secs': 0.451889} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.274867] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.275165] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee548617-9e92-46f7-9858-6addbcc7e658 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.283290] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 823.283290] env[70020]: value = "task-3618253" [ 823.283290] env[70020]: _type = "Task" [ 823.283290] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.299774] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618253, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.305910] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618250, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.328411] env[70020]: DEBUG oslo_vmware.api [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31553} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.328493] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 823.328747] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 823.328930] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 823.329205] env[70020]: INFO nova.compute.manager [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: f7a42358-f26a-4651-a929-d3836f050648] Took 1.16 seconds to destroy the instance on the hypervisor. [ 823.330037] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.330037] env[70020]: DEBUG nova.compute.manager [-] [instance: f7a42358-f26a-4651-a929-d3836f050648] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 823.330263] env[70020]: DEBUG nova.network.neutron [-] [instance: f7a42358-f26a-4651-a929-d3836f050648] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 823.362475] env[70020]: DEBUG oslo_concurrency.lockutils [req-b4dbfecb-b2cf-4bb4-ac30-4abf67a599e8 req-1eabc943-f641-47ec-bf2f-411f0f6ea543 service nova] Releasing lock "refresh_cache-38839949-c717-4f0b-97a7-108d87417b88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.363271] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired lock "refresh_cache-38839949-c717-4f0b-97a7-108d87417b88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.363575] env[70020]: DEBUG nova.network.neutron [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.538100] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.630463] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542908} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.631050] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 29d41731-4ae2-4cc4-bfda-b7356922c8ff/29d41731-4ae2-4cc4-bfda-b7356922c8ff.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.631458] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.631821] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4aa6f0c7-050d-4aa3-af0f-fb103c8541c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.639453] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 823.639453] env[70020]: value = "task-3618254" [ 823.639453] env[70020]: _type = "Task" [ 823.639453] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.648790] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618254, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.762576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-895ed8e8-53ea-4732-9f9c-013a1d5d7bc7 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "0add6226-3b90-4991-8f2b-81c35e72a7df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.681s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.797026] env[70020]: DEBUG oslo_vmware.api [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618253, 'name': PowerOnVM_Task, 'duration_secs': 0.509245} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.801128] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 823.801128] env[70020]: DEBUG nova.compute.manager [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 823.802790] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a0743a-c463-42ad-91d6-c583e2615c76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.810948] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618250, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.899059] env[70020]: DEBUG nova.network.neutron [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.044760] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618252, 'name': ReconfigVM_Task, 'duration_secs': 0.907572} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.044760] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Reconfigured VM instance instance-00000034 to attach disk [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad/f16d60a4-5f80-4f41-b994-068de48775ad.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.044760] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=70020) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 824.046902] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-da690414-cf84-4d78-97a4-195d47a197d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.059107] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 824.059107] env[70020]: value = "task-3618255" [ 824.059107] env[70020]: _type = "Task" [ 824.059107] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.060142] env[70020]: DEBUG nova.network.neutron [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Updating instance_info_cache with network_info: [{"id": "43c0cb8b-c829-4fa8-908c-527551c10fb9", "address": "fa:16:3e:9f:63:30", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c0cb8b-c8", "ovs_interfaceid": "43c0cb8b-c829-4fa8-908c-527551c10fb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.074889] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 
tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618255, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.150531] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084215} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.150531] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.150531] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25d77c2-a0d3-40d7-9d9c-bcbeafb03aec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.174800] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 29d41731-4ae2-4cc4-bfda-b7356922c8ff/29d41731-4ae2-4cc4-bfda-b7356922c8ff.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.174954] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a4b5aa5-4fb4-46bf-8a1d-9ade7c836ccb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.194702] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 824.194702] env[70020]: value = "task-3618256" [ 824.194702] env[70020]: _type = "Task" [ 824.194702] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.203599] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618256, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.303041] env[70020]: DEBUG nova.compute.manager [req-7d0c0711-25b0-43a0-bb44-417b256e8fb1 req-5c405234-3940-45f3-918f-d688c569abdb service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Received event network-vif-deleted-40c50349-5efa-4257-b479-b8632c4d6a66 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.303286] env[70020]: INFO nova.compute.manager [req-7d0c0711-25b0-43a0-bb44-417b256e8fb1 req-5c405234-3940-45f3-918f-d688c569abdb service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Neutron deleted interface 40c50349-5efa-4257-b479-b8632c4d6a66; detaching it from the instance and deleting it from the info cache [ 824.303456] env[70020]: DEBUG nova.network.neutron [req-7d0c0711-25b0-43a0-bb44-417b256e8fb1 req-5c405234-3940-45f3-918f-d688c569abdb service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.312648] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618250, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.336016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.545883] env[70020]: DEBUG nova.network.neutron [-] [instance: f7a42358-f26a-4651-a929-d3836f050648] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.569261] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Releasing lock "refresh_cache-38839949-c717-4f0b-97a7-108d87417b88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.570392] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Instance network_info: |[{"id": "43c0cb8b-c829-4fa8-908c-527551c10fb9", "address": "fa:16:3e:9f:63:30", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c0cb8b-c8", "ovs_interfaceid": "43c0cb8b-c829-4fa8-908c-527551c10fb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 824.570392] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:63:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43c0cb8b-c829-4fa8-908c-527551c10fb9', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.577623] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.581356] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.585109] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7836b7e3-f004-47bf-b0fe-5d602e443547 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.599070] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618255, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.053283} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.602143] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=70020) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 824.603589] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222ed4d2-de32-4017-b77d-e3b9b53a096f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.607509] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.607509] env[70020]: value = "task-3618257" [ 824.607509] env[70020]: _type = "Task" [ 824.607509] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.630221] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad/ephemeral_0.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.633321] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa49719b-5333-4e7b-bfec-22a6ac72baab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.650059] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618257, 'name': CreateVM_Task} progress is 15%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.655996] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 824.655996] env[70020]: value = "task-3618258" [ 824.655996] env[70020]: _type = "Task" [ 824.655996] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.664405] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618258, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.704474] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618256, 'name': ReconfigVM_Task, 'duration_secs': 0.293647} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.707189] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 29d41731-4ae2-4cc4-bfda-b7356922c8ff/29d41731-4ae2-4cc4-bfda-b7356922c8ff.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.708023] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19403420-6545-42a5-8166-a9365a161d7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.715694] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 824.715694] env[70020]: value = "task-3618259" [ 824.715694] env[70020]: _type = "Task" [ 824.715694] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.729127] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618259, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.807434] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618250, 'name': CloneVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.812940] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7dcf8e1-1a41-4023-adae-e8c6e188313b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.821321] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470c2db8-ccde-482e-a8a2-d7999ac26ec9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.862105] env[70020]: DEBUG nova.compute.manager [req-7d0c0711-25b0-43a0-bb44-417b256e8fb1 req-5c405234-3940-45f3-918f-d688c569abdb service nova] [instance: f7a42358-f26a-4651-a929-d3836f050648] Detach interface failed, port_id=40c50349-5efa-4257-b479-b8632c4d6a66, reason: Instance f7a42358-f26a-4651-a929-d3836f050648 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 824.955686] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93653d8-f47c-4937-bd73-f2b4717d4211 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.963420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c2815e-d5dc-4b42-a758-7c1ddd3b3550 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.996241] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fe838a-925e-449a-b48e-8512152f55cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.010152] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342637de-34f4-4f46-9ba3-29f6dbacf44b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.025131] env[70020]: DEBUG nova.compute.provider_tree [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.049155] env[70020]: INFO nova.compute.manager [-] [instance: f7a42358-f26a-4651-a929-d3836f050648] Took 1.72 seconds to deallocate network for instance. [ 825.119087] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618257, 'name': CreateVM_Task, 'duration_secs': 0.326891} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.119087] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 825.119716] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.119906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.120319] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 825.120581] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88b826bf-8edd-4cf9-9bbd-2c9868ef242e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.124951] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 825.124951] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5239c702-052c-b67a-6075-43a0965187b4" [ 825.124951] env[70020]: _type = "Task" [ 825.124951] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.133071] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5239c702-052c-b67a-6075-43a0965187b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.166322] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.228278] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618259, 'name': Rename_Task, 'duration_secs': 0.194147} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.228397] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.228728] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-459aa4ca-fb21-48bc-865b-f62e470550e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.236715] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 825.236715] env[70020]: value = "task-3618260" [ 825.236715] env[70020]: _type = "Task" [ 825.236715] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.245975] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618260, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.310016] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618250, 'name': CloneVM_Task, 'duration_secs': 2.070354} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.310016] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Created linked-clone VM from snapshot [ 825.310016] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff04b11-e28c-4c89-9f69-a4b0f2970664 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.317508] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Uploading image 038d3b5b-38fb-498f-b4cc-5ed167e098c3 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 825.345149] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 825.345149] env[70020]: value = "vm-721683" [ 825.345149] env[70020]: _type = "VirtualMachine" [ 825.345149] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 825.345435] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-49c4a994-28ae-48d3-b326-c2360d4b3644 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.352115] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease: (returnval){ [ 825.352115] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bf5f7-1a1c-c116-6c4d-eab4b907e58e" [ 825.352115] env[70020]: _type = "HttpNfcLease" [ 825.352115] env[70020]: } obtained for exporting VM: (result){ [ 825.352115] env[70020]: value = "vm-721683" [ 825.352115] env[70020]: _type = "VirtualMachine" [ 825.352115] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 825.352528] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the lease: (returnval){ [ 825.352528] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bf5f7-1a1c-c116-6c4d-eab4b907e58e" [ 825.352528] env[70020]: _type = "HttpNfcLease" [ 825.352528] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 825.359567] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 825.359567] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bf5f7-1a1c-c116-6c4d-eab4b907e58e" [ 825.359567] env[70020]: _type = "HttpNfcLease" [ 825.359567] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 825.504438] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "55c20886-ae10-4326-a9de-f8577f320a99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.504731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "55c20886-ae10-4326-a9de-f8577f320a99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.504950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "55c20886-ae10-4326-a9de-f8577f320a99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.505213] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "55c20886-ae10-4326-a9de-f8577f320a99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.505738] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "55c20886-ae10-4326-a9de-f8577f320a99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.507860] env[70020]: INFO nova.compute.manager [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Terminating instance [ 825.529059] env[70020]: DEBUG nova.scheduler.client.report [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.558981] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.636187] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5239c702-052c-b67a-6075-43a0965187b4, 'name': SearchDatastore_Task, 'duration_secs': 0.012197} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.636503] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.636785] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.637085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.637247] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.637458] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.637742] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3a28570-3c40-4292-90b9-4587aa7cc72d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.646699] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.646889] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.647648] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97cd8c98-0a68-4cd2-beeb-169505570e8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.654388] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 825.654388] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5277fc11-fed5-032d-397a-af54a69724b8" [ 825.654388] env[70020]: _type = "Task" [ 825.654388] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.676173] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5277fc11-fed5-032d-397a-af54a69724b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009548} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.682711] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618258, 'name': ReconfigVM_Task, 'duration_secs': 0.813871} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.682962] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fea51487-f028-4b68-8cd9-452be66e68d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.685537] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Reconfigured VM instance instance-00000034 to attach disk [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad/ephemeral_0.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.686221] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0a89331-30d7-44c8-929b-e7874f0c3469 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.690747] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 825.690747] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529007e6-1bcb-a7b9-15e0-52db559f95a0" [ 825.690747] env[70020]: _type = "Task" [ 825.690747] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.696423] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 825.696423] env[70020]: value = "task-3618262" [ 825.696423] env[70020]: _type = "Task" [ 825.696423] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.705087] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529007e6-1bcb-a7b9-15e0-52db559f95a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.710327] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618262, 'name': Rename_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.746595] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618260, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.861115] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 825.861115] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bf5f7-1a1c-c116-6c4d-eab4b907e58e" [ 825.861115] env[70020]: _type = "HttpNfcLease" [ 825.861115] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 825.861377] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 825.861377] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bf5f7-1a1c-c116-6c4d-eab4b907e58e" [ 825.861377] env[70020]: _type = "HttpNfcLease" [ 825.861377] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 825.862147] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d0a246-c906-4e08-a8d5-cb3e7617fca2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.869393] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a22bb-67c9-6f81-bea3-596ef64734aa/disk-0.vmdk from lease info. 
{{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 825.869566] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a22bb-67c9-6f81-bea3-596ef64734aa/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 825.988884] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c8fb6b7a-eb9a-4999-b08a-7e401002f435 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.011437] env[70020]: DEBUG nova.compute.manager [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.011683] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.014826] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c7ba2f-0764-4e02-a297-de790f74e5c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.022125] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.024940] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb206482-0762-4fd8-93b6-b311d08082af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.030919] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 826.030919] env[70020]: value = "task-3618263" [ 826.030919] env[70020]: _type = "Task" [ 826.030919] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.037096] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.817s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.037639] env[70020]: DEBUG nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.045342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.970s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.046150] env[70020]: INFO nova.compute.claims [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.054572] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.208224] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529007e6-1bcb-a7b9-15e0-52db559f95a0, 'name': SearchDatastore_Task, 'duration_secs': 0.012293} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.212269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.212727] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 38839949-c717-4f0b-97a7-108d87417b88/38839949-c717-4f0b-97a7-108d87417b88.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.213220] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618262, 'name': Rename_Task, 'duration_secs': 0.304541} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.213455] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3521d843-0d76-40be-9805-96690687c6dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.215496] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.215773] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f57501c-f7f0-44c8-85cb-92a79f1bf56b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.225293] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 826.225293] env[70020]: value = "task-3618264" [ 826.225293] env[70020]: _type = "Task" [ 826.225293] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.225293] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 826.225293] env[70020]: value = "task-3618265" [ 826.225293] env[70020]: _type = "Task" [ 826.225293] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.237423] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.243559] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618265, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.252013] env[70020]: DEBUG oslo_vmware.api [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618260, 'name': PowerOnVM_Task, 'duration_secs': 0.613016} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.252381] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.252552] env[70020]: INFO nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Took 8.04 seconds to spawn the instance on the hypervisor. [ 826.253402] env[70020]: DEBUG nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.254222] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aba1934-729d-45dc-9ff9-33241ff9fbd8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.548145] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.553189] env[70020]: DEBUG nova.compute.utils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 826.554907] env[70020]: DEBUG nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 826.555815] env[70020]: DEBUG nova.network.neutron [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 826.603269] env[70020]: DEBUG nova.policy [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33279b0a8dc848ceb443776f840845c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16f59a8f930846ec9299416b9ec5dd48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 826.745247] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618265, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.749631] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618264, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.778412] env[70020]: INFO nova.compute.manager [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Took 32.46 seconds to build instance. 
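(Editorial note.) The repeated "Task: {'id': ..., 'name': ...} progress is N%" entries above, followed by a "completed successfully" line, come from the task-polling loop in oslo.vmware that the driver runs for every vSphere task it starts. The snippet below is only a minimal, self-contained sketch of that poll-until-done pattern, not the real `oslo_vmware.api` code; `TaskInfo` and `get_task_info` are hypothetical stand-ins for the PropertyCollector reads the driver actually performs.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    # Hypothetical stand-in for the vim.TaskInfo object the real driver reads
    # back from the PropertyCollector on each poll cycle.
    state: str                 # "queued" | "running" | "success" | "error"
    progress: int              # 0-100, as in the "progress is N%" log lines
    error: Optional[str] = None


def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, emitting one progress line per cycle.

    Mirrors the behaviour visible in the log: a "progress is N%" line per
    poll, then a final "completed successfully" (or an error) at the end.
    """
    while True:
        info = get_task_info()
        print(f"Task progress is {info.progress}%")        # _poll_task analogue
        if info.state == "success":
            print("Task completed successfully")            # completion analogue
            return info
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        time.sleep(poll_interval)
```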
[ 826.846058] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "00232eca-da03-49ea-b62b-d9721739b0ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.846412] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "00232eca-da03-49ea-b62b-d9721739b0ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.012957] env[70020]: DEBUG nova.network.neutron [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Successfully created port: 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.048562] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.062037] env[70020]: DEBUG nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.240197] env[70020]: DEBUG oslo_vmware.api [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618264, 'name': PowerOnVM_Task, 'duration_secs': 0.603353} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.243166] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.244046] env[70020]: INFO nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Took 11.18 seconds to spawn the instance on the hypervisor. 
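(Editorial note.) The "Acquiring lock ...", "Lock ... acquired ... :: waited N s" and "Lock ... \"released\" ... :: held N s" triplets in these entries are emitted by oslo.concurrency's `lockutils` wrappers around per-instance build locks and the shared `compute_resources` lock. As a rough illustration only — the lock names and guarded functions below are invented for the example, and Nova wires this up through its own helpers — the same pattern can be reproduced with the public `lockutils` API:

```python
from oslo_concurrency import lockutils


# Decorator form: serialises work keyed on an instance UUID, which is what
# produces the per-instance "_locked_do_build_and_run_instance" lock lines.
# The semaphore name here is illustrative, not Nova's actual wiring.
@lockutils.synchronized('00232eca-da03-49ea-b62b-d9721739b0ec')
def locked_do_build_and_run_instance():
    pass  # build-and-run work happens while the per-instance lock is held


# Context-manager form: the waited/held timings reported in the log are
# measured around this acquire/release boundary.
def claim_resources():
    with lockutils.lock('compute_resources'):
        pass  # resource-tracker claim happens while the lock is held
```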
[ 827.244046] env[70020]: DEBUG nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.244888] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e8bef2-ddfc-4449-baeb-3cb60d02ac0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.251121] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625537} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.251798] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 38839949-c717-4f0b-97a7-108d87417b88/38839949-c717-4f0b-97a7-108d87417b88.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.252276] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.252536] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b84b9501-99ea-45fb-a15a-77ecea914be4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.266911] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 827.266911] env[70020]: value = "task-3618266" [ 827.266911] env[70020]: _type = "Task" [ 827.266911] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.276221] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618266, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.283733] env[70020]: DEBUG oslo_concurrency.lockutils [None req-857d54f6-e09d-4957-be79-d3e16d8d2918 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.197s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.549553] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.628451] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bfd49c-42aa-4e88-b8b1-af235f32b325 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.637181] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdd9b12-8bd5-47be-8f78-3b25df054a3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.671434] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43e2182-8636-48fa-8a30-7398a6548a96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.679847] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f51b222-79da-4151-90a2-f98ae86a115e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.697046] env[70020]: DEBUG nova.compute.provider_tree [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.771238] env[70020]: INFO nova.compute.manager [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Took 34.69 seconds to build instance. [ 827.783616] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079601} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.783902] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.784997] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fe1586-fd0a-4dc1-8a43-78ac1458c1b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.788142] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.813695] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 38839949-c717-4f0b-97a7-108d87417b88/38839949-c717-4f0b-97a7-108d87417b88.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.814558] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-537c91e9-38cf-4c78-af48-8d4eb945f52f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.836216] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 827.836216] env[70020]: value = "task-3618267" [ 827.836216] env[70020]: _type = "Task" [ 827.836216] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.847502] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618267, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.051162] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618263, 'name': PowerOffVM_Task, 'duration_secs': 1.882918} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.051649] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.051949] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.052466] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e16adf1f-1731-4514-a3fc-41a2991994e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.072126] env[70020]: DEBUG nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.101723] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.102026] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.102258] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.102482] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.102629] env[70020]: DEBUG nova.virt.hardware [None 
req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.102777] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.103021] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.103241] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.103499] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.103714] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.103905] env[70020]: DEBUG nova.virt.hardware [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.104866] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c5c5b3-2a52-44ef-b916-438de82c1c47 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.113776] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b210d95c-8dd6-4772-848c-6eef9af42891 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.148610] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.148850] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Deleting 
contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.149017] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleting the datastore file [datastore1] 55c20886-ae10-4326-a9de-f8577f320a99 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.149308] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8c8d2c4-01a6-4915-9e46-4ecfa95bac8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.156803] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 828.156803] env[70020]: value = "task-3618269" [ 828.156803] env[70020]: _type = "Task" [ 828.156803] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.176326] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618269, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.201740] env[70020]: DEBUG nova.scheduler.client.report [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.279051] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86e6aeba-c9ad-4125-86eb-e85b283d86bb tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "f16d60a4-5f80-4f41-b994-068de48775ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.998s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.316088] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.348110] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618267, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.565651] env[70020]: DEBUG nova.network.neutron [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Successfully updated port: 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.624202] env[70020]: DEBUG nova.compute.manager [req-4fd640c1-9671-4537-9091-b89122b17fd2 req-c2a96e03-b4d1-4d8d-be36-f3c600d995bc service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Received event network-vif-plugged-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.624487] env[70020]: DEBUG oslo_concurrency.lockutils [req-4fd640c1-9671-4537-9091-b89122b17fd2 req-c2a96e03-b4d1-4d8d-be36-f3c600d995bc service nova] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.625465] env[70020]: DEBUG oslo_concurrency.lockutils [req-4fd640c1-9671-4537-9091-b89122b17fd2 req-c2a96e03-b4d1-4d8d-be36-f3c600d995bc service nova] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.625680] env[70020]: DEBUG oslo_concurrency.lockutils [req-4fd640c1-9671-4537-9091-b89122b17fd2 req-c2a96e03-b4d1-4d8d-be36-f3c600d995bc service nova] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.625857] env[70020]: DEBUG nova.compute.manager [req-4fd640c1-9671-4537-9091-b89122b17fd2 req-c2a96e03-b4d1-4d8d-be36-f3c600d995bc service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] No waiting events found dispatching network-vif-plugged-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 828.626068] env[70020]: WARNING nova.compute.manager [req-4fd640c1-9671-4537-9091-b89122b17fd2 req-c2a96e03-b4d1-4d8d-be36-f3c600d995bc service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Received unexpected event network-vif-plugged-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e for instance with vm_state building and task_state spawning. [ 828.668169] env[70020]: DEBUG oslo_vmware.api [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280678} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.668714] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.668714] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.668714] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.668927] env[70020]: INFO nova.compute.manager [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Took 2.66 seconds to destroy the instance on the hypervisor. [ 828.669175] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.669503] env[70020]: DEBUG nova.compute.manager [-] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.669503] env[70020]: DEBUG nova.network.neutron [-] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.707855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.709793] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 828.713660] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.876s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.713968] env[70020]: DEBUG nova.objects.instance [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lazy-loading 'resources' on Instance uuid c56279e2-0fc6-4546-854c-82e5fda0e7a7 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.782237] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 828.847468] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618267, 'name': ReconfigVM_Task, 'duration_secs': 0.617913} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.847751] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 38839949-c717-4f0b-97a7-108d87417b88/38839949-c717-4f0b-97a7-108d87417b88.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.848391] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dba18af-3649-434d-8c08-b4015041100d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.855911] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 828.855911] env[70020]: value = "task-3618270" [ 828.855911] env[70020]: _type = "Task" [ 828.855911] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.865446] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618270, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.068110] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.068977] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.068977] env[70020]: DEBUG nova.network.neutron [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.158250] env[70020]: DEBUG nova.compute.manager [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Received event network-changed-320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.158458] env[70020]: DEBUG nova.compute.manager [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Refreshing instance network info cache due to event network-changed-320d056b-ab7e-455d-a9dc-f443a22fc563. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 829.158715] env[70020]: DEBUG oslo_concurrency.lockutils [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] Acquiring lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.158864] env[70020]: DEBUG oslo_concurrency.lockutils [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] Acquired lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.159292] env[70020]: DEBUG nova.network.neutron [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Refreshing network info cache for port 320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.220419] env[70020]: DEBUG nova.compute.utils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.221793] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.221955] env[70020]: DEBUG nova.network.neutron [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.281719] env[70020]: DEBUG nova.policy [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c32498a6608a43dab8045aef0b3006e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '051ddf351c534f65be94aef74fb2ff03', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 829.308052] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.366438] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618270, 'name': Rename_Task, 'duration_secs': 0.276337} completed 
successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.369259] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.370834] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bbec774-db81-4a15-8768-527153636c4d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.377990] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 829.377990] env[70020]: value = "task-3618271" [ 829.377990] env[70020]: _type = "Task" [ 829.377990] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.389139] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.460712] env[70020]: DEBUG nova.network.neutron [-] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.592697] env[70020]: DEBUG nova.network.neutron [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Successfully created port: ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.604970] env[70020]: DEBUG nova.compute.manager [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.606039] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283bf4c6-abac-4e5b-bf2e-f6ad9fe01096 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.622120] env[70020]: DEBUG nova.network.neutron [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.725284] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 829.811842] env[70020]: DEBUG nova.network.neutron [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updating instance_info_cache with network_info: [{"id": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "address": "fa:16:3e:7e:7f:6e", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5cbd43-e9", "ovs_interfaceid": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.817362] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7858e608-a376-47b5-9c63-f17ff7a3e84d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.827357] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f99f8b1-a748-4def-b528-ccc304eeeeab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.865019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7a5eaf-ce23-49e3-a956-47ad21c6ac95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.874726] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdcc2ec-439e-4753-8c2f-b2a8ee94c858 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.891758] env[70020]: DEBUG nova.compute.provider_tree [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.897247] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618271, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.963165] env[70020]: INFO nova.compute.manager [-] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Took 1.29 seconds to deallocate network for instance. [ 830.042321] env[70020]: DEBUG nova.network.neutron [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Updated VIF entry in instance network info cache for port 320d056b-ab7e-455d-a9dc-f443a22fc563. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.042701] env[70020]: DEBUG nova.network.neutron [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Updating instance_info_cache with network_info: [{"id": "320d056b-ab7e-455d-a9dc-f443a22fc563", "address": "fa:16:3e:77:e3:59", "network": {"id": "47f58371-9cbc-4ed3-98be-09900c36cbf3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-119128355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90641c26c4064f219bf2e52694da4e0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320d056b-ab", "ovs_interfaceid": "320d056b-ab7e-455d-a9dc-f443a22fc563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.121721] env[70020]: INFO nova.compute.manager [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] instance snapshotting [ 830.124619] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9d5f4d-cf01-4775-b799-a1e0b3f824ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.146793] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f37808-9e20-4344-acad-2a75e5b72b89 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.321927] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.322347] env[70020]: DEBUG nova.compute.manager [None 
req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Instance network_info: |[{"id": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "address": "fa:16:3e:7e:7f:6e", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5cbd43-e9", "ovs_interfaceid": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 830.322769] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:7f:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.330368] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Creating folder: Project (16f59a8f930846ec9299416b9ec5dd48). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.330784] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-837db942-982c-4baa-888e-82bebb32d06a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.341793] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Created folder: Project (16f59a8f930846ec9299416b9ec5dd48) in parent group-v721521. [ 830.341979] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Creating folder: Instances. Parent ref: group-v721685. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.342255] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ecdbd06-546e-4df0-8f05-fa0c65fb1a63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.350861] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Created folder: Instances in parent group-v721685. [ 830.350861] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 830.350861] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 830.351129] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0aad854b-3ab5-4ccc-bf5a-ae273e753032 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.369442] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.369442] env[70020]: value = "task-3618274" [ 830.369442] env[70020]: _type = "Task" [ 830.369442] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.376904] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618274, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.386336] env[70020]: DEBUG oslo_vmware.api [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618271, 'name': PowerOnVM_Task, 'duration_secs': 0.584329} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.386588] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.386784] env[70020]: INFO nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Took 9.47 seconds to spawn the instance on the hypervisor. 
[ 830.386959] env[70020]: DEBUG nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.387704] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43da62be-0b62-4160-816a-090ac3db8527 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.399206] env[70020]: DEBUG nova.scheduler.client.report [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.470690] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.546290] env[70020]: DEBUG oslo_concurrency.lockutils [req-daf70f97-35b1-4039-bed3-f04c2981750c req-bce7951d-3e08-46d5-bf2e-2409586468e5 service nova] Releasing lock "refresh_cache-f16d60a4-5f80-4f41-b994-068de48775ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.656552] env[70020]: DEBUG nova.compute.manager [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Received event network-changed-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.656704] env[70020]: DEBUG nova.compute.manager [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Refreshing instance network info cache due to event network-changed-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 830.657009] env[70020]: DEBUG oslo_concurrency.lockutils [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] Acquiring lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.657084] env[70020]: DEBUG oslo_concurrency.lockutils [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] Acquired lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.657248] env[70020]: DEBUG nova.network.neutron [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Refreshing network info cache for port 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.659567] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 830.659672] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0ab985d9-83f4-470b-8dfc-55930354ebaa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.668261] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 830.668261] env[70020]: value = "task-3618275" [ 830.668261] env[70020]: _type = "Task" [ 830.668261] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.678691] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618275, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.735845] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 830.760017] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T23:03:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8fe80bc1-98b9-4377-a5a8-72095e677071',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-537589333',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.760017] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.760017] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.760017] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.760017] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.760580] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.760971] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.761378] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.763735] env[70020]: DEBUG nova.virt.hardware [None 
req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.763735] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.763735] env[70020]: DEBUG nova.virt.hardware [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.763735] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdaa0b8-5806-463d-86e1-26c21e50492a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.774323] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b0789c-60cf-45ab-94c6-e9f63df5e9fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.881800] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618274, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.904140] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.190s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.906393] env[70020]: INFO nova.compute.manager [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Took 35.51 seconds to build instance. 
[ 830.907576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.776s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.907792] env[70020]: DEBUG nova.objects.instance [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lazy-loading 'resources' on Instance uuid 48efbd17-ff4e-426a-a135-f43cae8c97d0 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.931879] env[70020]: INFO nova.scheduler.client.report [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted allocations for instance c56279e2-0fc6-4546-854c-82e5fda0e7a7 [ 831.178703] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618275, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.341914] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1471196c-e29b-4f36-833c-6650189ca075 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.350232] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Suspending the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 831.351197] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4d236a29-39a3-4061-bf80-dd5174e76567 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.364625] env[70020]: DEBUG oslo_vmware.api [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] Waiting for the task: (returnval){ [ 831.364625] env[70020]: value = "task-3618276" [ 831.364625] env[70020]: _type = "Task" [ 831.364625] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.370198] env[70020]: DEBUG nova.network.neutron [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Successfully updated port: ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.389031] env[70020]: DEBUG oslo_vmware.api [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] Task: {'id': task-3618276, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.400838] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618274, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.410386] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c99ba87a-7b4f-4cf2-854b-76885197bc47 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "38839949-c717-4f0b-97a7-108d87417b88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.921s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.441131] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a2b3dd-7d2e-481c-a073-48b4a3dc10d7 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "c56279e2-0fc6-4546-854c-82e5fda0e7a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.083s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.488821] env[70020]: DEBUG nova.network.neutron [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updated VIF entry in instance network info cache for port 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.489308] env[70020]: DEBUG nova.network.neutron [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updating instance_info_cache with network_info: [{"id": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "address": "fa:16:3e:7e:7f:6e", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5cbd43-e9", "ovs_interfaceid": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.680793] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618275, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.869416] env[70020]: DEBUG oslo_vmware.api [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] Task: {'id': task-3618276, 'name': SuspendVM_Task} progress is 50%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.882862] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.883054] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.883234] env[70020]: DEBUG nova.network.neutron [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.884365] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618274, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.913947] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 831.954896] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e831c7e-58b7-415f-b22c-d9d44898eba8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.964302] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc46d56-9b3c-4fb7-94e4-832edf47a519 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.001557] env[70020]: DEBUG oslo_concurrency.lockutils [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] Releasing lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.001821] env[70020]: DEBUG nova.compute.manager [req-5fff513e-b208-4a83-a2a5-3ad97ed6f4b7 req-72813edf-d5f2-46a7-b9f4-f2d23b2bbba5 service nova] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Received event network-vif-deleted-632c2a24-8d7d-4754-87e6-79e1f5f4b8bf {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.003057] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84051887-4232-48d1-8694-d01d3bf0302f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.010979] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9997f5c3-880f-4de4-9bfc-411e86767f28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.025980] env[70020]: DEBUG nova.compute.provider_tree [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.183954] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618275, 'name': CreateSnapshot_Task, 'duration_secs': 1.220746} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.183954] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 832.184661] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e1611f-90ae-4f5a-9de2-96d1654d2a01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.370109] env[70020]: DEBUG oslo_vmware.api [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] Task: {'id': task-3618276, 'name': SuspendVM_Task, 'duration_secs': 0.907449} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.370422] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Suspended the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 832.370628] env[70020]: DEBUG nova.compute.manager [None req-4f0b9e27-adb7-422e-adf0-ec062fee5551 tempest-ServersAdminNegativeTestJSON-642453273 tempest-ServersAdminNegativeTestJSON-642453273-project-admin] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 832.371473] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e24549f-8083-40c8-a36f-c4fa0c9b1c8d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.385427] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618274, 'name': CreateVM_Task, 'duration_secs': 1.591509} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.386491] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.387865] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.387865] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.387865] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.389256] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab9e51b9-9563-4d99-8f31-ace41a3fedef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.394735] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 832.394735] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5280cc62-021e-ece2-a5d2-b70abb871db7" [ 832.394735] env[70020]: _type = "Task" [ 832.394735] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.404129] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5280cc62-021e-ece2-a5d2-b70abb871db7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.422883] env[70020]: DEBUG nova.network.neutron [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.451522] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.530242] env[70020]: DEBUG nova.scheduler.client.report [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 832.594084] env[70020]: DEBUG nova.network.neutron [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.706307] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 832.707486] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5a6f07da-61bd-4a4c-925b-b78079aa6399 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.712990] env[70020]: DEBUG nova.compute.manager [req-059e9c39-d828-4a7a-ad41-c434877acdc5 
req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Received event network-vif-plugged-ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.713136] env[70020]: DEBUG oslo_concurrency.lockutils [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] Acquiring lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.713355] env[70020]: DEBUG oslo_concurrency.lockutils [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] Lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.713526] env[70020]: DEBUG oslo_concurrency.lockutils [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] Lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.713700] env[70020]: DEBUG nova.compute.manager [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] No waiting events found dispatching network-vif-plugged-ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.713862] env[70020]: WARNING nova.compute.manager [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Received unexpected event network-vif-plugged-ac1e36da-5de5-4451-a9e7-39165ab5f152 for instance with vm_state building and task_state spawning. [ 832.714039] env[70020]: DEBUG nova.compute.manager [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Received event network-changed-ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.714194] env[70020]: DEBUG nova.compute.manager [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Refreshing instance network info cache due to event network-changed-ac1e36da-5de5-4451-a9e7-39165ab5f152. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 832.714374] env[70020]: DEBUG oslo_concurrency.lockutils [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] Acquiring lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.719594] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 832.719594] env[70020]: value = "task-3618277" [ 832.719594] env[70020]: _type = "Task" [ 832.719594] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.729020] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618277, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.906674] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5280cc62-021e-ece2-a5d2-b70abb871db7, 'name': SearchDatastore_Task, 'duration_secs': 0.018145} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.906995] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.907347] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.907482] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.907622] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.907814] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f 
tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.908153] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02ebe823-717c-4187-a12e-9961a2740cff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.919195] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.919195] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.919391] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10f81ecb-916a-4293-983e-bd31dd3366da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.926076] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 832.926076] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523e12a2-613a-3981-bcbb-a20f063e602a" [ 832.926076] env[70020]: _type = "Task" [ 832.926076] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.936350] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523e12a2-613a-3981-bcbb-a20f063e602a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.037546] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.040028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.548s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.041490] env[70020]: INFO nova.compute.claims [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.067308] env[70020]: INFO nova.scheduler.client.report [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted allocations for instance 48efbd17-ff4e-426a-a135-f43cae8c97d0 [ 833.098232] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.098602] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Instance network_info: |[{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.099346] env[70020]: DEBUG oslo_concurrency.lockutils [req-059e9c39-d828-4a7a-ad41-c434877acdc5 
req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] Acquired lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.099635] env[70020]: DEBUG nova.network.neutron [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Refreshing network info cache for port ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.100860] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:87:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac1e36da-5de5-4451-a9e7-39165ab5f152', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.109291] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.110526] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.110941] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-026ca790-0014-4c3c-8eb1-bc428c2c5daf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.134135] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.134135] env[70020]: value = "task-3618278" [ 833.134135] env[70020]: _type = "Task" [ 833.134135] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.143078] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618278, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.266522] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618277, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.442280] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523e12a2-613a-3981-bcbb-a20f063e602a, 'name': SearchDatastore_Task, 'duration_secs': 0.012816} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.443351] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d5f401a-5339-4aff-a265-6ec211d0dbd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.450676] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 833.450676] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5289285c-4f15-b2dd-4068-f195091a0bea" [ 833.450676] env[70020]: _type = "Task" [ 833.450676] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.461563] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5289285c-4f15-b2dd-4068-f195091a0bea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.575163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-acec40ea-eb4b-49de-9535-9e8609bfd877 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "48efbd17-ff4e-426a-a135-f43cae8c97d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.955s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.649573] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618278, 'name': CreateVM_Task, 'duration_secs': 0.476241} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.652659] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.653556] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.653916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.654240] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.655027] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4146180c-5e09-4753-b1d5-5d5ac10f302c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.661826] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 833.661826] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521540a9-c189-e971-5e9e-bfa402511e99" [ 833.661826] env[70020]: _type = "Task" [ 833.661826] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.676564] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521540a9-c189-e971-5e9e-bfa402511e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.734052] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618277, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.862763] env[70020]: DEBUG nova.network.neutron [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updated VIF entry in instance network info cache for port ac1e36da-5de5-4451-a9e7-39165ab5f152. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.863163] env[70020]: DEBUG nova.network.neutron [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.964318] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5289285c-4f15-b2dd-4068-f195091a0bea, 'name': SearchDatastore_Task, 'duration_secs': 0.014629} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.964712] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.965110] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 5c216231-afc5-41df-a243-bb2a17c20bfe/5c216231-afc5-41df-a243-bb2a17c20bfe.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.965452] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2929b99-ab37-40a6-8a1e-4b5995abb920 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.973658] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 833.973658] env[70020]: value = "task-3618279" [ 833.973658] env[70020]: _type = "Task" [ 833.973658] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.982903] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.176949] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521540a9-c189-e971-5e9e-bfa402511e99, 'name': SearchDatastore_Task, 'duration_secs': 0.013351} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.180997] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.180997] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.181256] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.181434] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.181814] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.182583] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef25f943-46f1-4bbd-be3a-c1217d122460 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.199191] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.199540] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.200380] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bd6f026-d998-48fe-a675-cf12d5688d89 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.210837] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 834.210837] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ef7d08-8b10-d712-c961-2e3ac3bfcc01" [ 834.210837] env[70020]: _type = "Task" [ 834.210837] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.230832] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ef7d08-8b10-d712-c961-2e3ac3bfcc01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.241551] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618277, 'name': CloneVM_Task} progress is 95%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.367400] env[70020]: DEBUG oslo_concurrency.lockutils [req-059e9c39-d828-4a7a-ad41-c434877acdc5 req-e8a4421b-8dc3-410b-a108-bf25892651d1 service nova] Releasing lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.485879] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618279, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476013} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.486114] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 5c216231-afc5-41df-a243-bb2a17c20bfe/5c216231-afc5-41df-a243-bb2a17c20bfe.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.486338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.486594] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71bd452b-2ecd-48e1-88e1-e6eee56b22dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.492366] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 834.492366] env[70020]: value = "task-3618280" [ 834.492366] env[70020]: _type = "Task" [ 834.492366] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.503515] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618280, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.647643] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88e015a-4cf2-4657-8be7-8145af5fcf5e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.656018] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f24af8-523a-4965-bd30-6f15f2acab6b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.696127] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1f4d10-e019-4712-928b-5863a66c9976 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.706515] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16c1cda-d466-4c7a-853c-3b3c6c68dae7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.725948] env[70020]: DEBUG nova.compute.provider_tree [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.735200] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ef7d08-8b10-d712-c961-2e3ac3bfcc01, 'name': SearchDatastore_Task, 'duration_secs': 0.061265} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.735854] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78717515-a6cb-426b-9a67-9a8d546ab64f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.741713] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618277, 'name': CloneVM_Task, 'duration_secs': 1.685874} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.742332] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Created linked-clone VM from snapshot [ 834.743186] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ee4302-725a-44a7-ad63-dd3f8a4e7d08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.748500] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 834.748500] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fbdbf9-12f0-16de-7198-526392874d30" [ 834.748500] env[70020]: _type = "Task" [ 834.748500] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.756636] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Uploading image f911f098-6c4e-4434-bbd1-693555fb5d56 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 834.767424] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fbdbf9-12f0-16de-7198-526392874d30, 'name': SearchDatastore_Task, 'duration_secs': 0.009363} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.767424] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.767424] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.767424] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cafdbe4-dc30-49c2-bbfd-36f2714bdcec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.774542] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 834.774542] env[70020]: value = "task-3618281" [ 834.774542] env[70020]: _type = "Task" [ 834.774542] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.779060] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 834.779060] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1df9717a-6cc4-47e5-8c64-520faafddcb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.785670] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618281, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.787273] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 834.787273] env[70020]: value = "task-3618282" [ 834.787273] env[70020]: _type = "Task" [ 834.787273] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.798366] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618282, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.004280] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618280, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072389} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.004602] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.005690] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8198d054-6740-4ebf-b910-2eeea29ba5bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.032774] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 5c216231-afc5-41df-a243-bb2a17c20bfe/5c216231-afc5-41df-a243-bb2a17c20bfe.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.033214] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5210feaa-668b-49a0-b924-a16b9f6a7837 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.054620] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 835.054620] env[70020]: value = "task-3618283" [ 835.054620] env[70020]: _type = "Task" [ 835.054620] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.064393] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618283, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.237432] env[70020]: DEBUG nova.scheduler.client.report [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.286761] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618281, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.297533] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618282, 'name': Destroy_Task, 'duration_secs': 0.411228} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.297851] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Destroyed the VM [ 835.298285] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 835.298407] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8647924d-c345-4918-91ca-fa7fb290f9bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.308826] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 835.308826] env[70020]: value = "task-3618284" [ 835.308826] env[70020]: _type = "Task" [ 835.308826] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.316777] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618284, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.457136] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "61875dcc-5b76-409b-987f-4ae875909257" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.457136] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "61875dcc-5b76-409b-987f-4ae875909257" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.488593] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "d65ab5e0-189c-43e1-accf-16248ad02852" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.488829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "d65ab5e0-189c-43e1-accf-16248ad02852" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.564874] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618283, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.743990] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.744570] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 835.747164] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.223s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.748544] env[70020]: INFO nova.compute.claims [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.789256] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618281, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589202} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.789591] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.789809] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.790182] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82794a3c-b4a5-49a5-b8ae-fc273e28cc94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.797431] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 835.797431] env[70020]: value = "task-3618285" [ 835.797431] env[70020]: _type = "Task" [ 835.797431] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.807415] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618285, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.816717] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618284, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.067639] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618283, 'name': ReconfigVM_Task, 'duration_secs': 0.605416} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.067998] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 5c216231-afc5-41df-a243-bb2a17c20bfe/5c216231-afc5-41df-a243-bb2a17c20bfe.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.068814] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80bff845-22d7-403b-8566-34ccad825542 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.076162] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 836.076162] env[70020]: value = "task-3618286" [ 836.076162] env[70020]: _type = "Task" [ 836.076162] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.085131] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618286, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.139224] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a22bb-67c9-6f81-bea3-596ef64734aa/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 836.140152] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0dd84f-428d-4cf3-8d3a-ad5ade154bb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.145643] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a22bb-67c9-6f81-bea3-596ef64734aa/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 836.145833] env[70020]: ERROR oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a22bb-67c9-6f81-bea3-596ef64734aa/disk-0.vmdk due to incomplete transfer. [ 836.145994] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e1aed8e0-b4a9-4215-88a7-64d5f3897dda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.151974] env[70020]: DEBUG oslo_vmware.rw_handles [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a22bb-67c9-6f81-bea3-596ef64734aa/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 836.152208] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Uploaded image 038d3b5b-38fb-498f-b4cc-5ed167e098c3 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 836.154380] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 836.154607] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c6932a88-fed3-42e5-935a-1205d55d6cce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.160014] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 836.160014] env[70020]: value = "task-3618287" [ 836.160014] env[70020]: _type = "Task" [ 836.160014] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.167563] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618287, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.253670] env[70020]: DEBUG nova.compute.utils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.257237] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.257348] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.307474] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618285, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073631} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.309181] env[70020]: DEBUG nova.policy [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f327ea11ea09400b912db630dafe6c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0986c957c04b40e2a8e252212abadb4c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.310402] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.312823] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca61a156-e262-46a0-a679-19421868927e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.321999] env[70020]: DEBUG oslo_vmware.api [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618284, 'name': RemoveSnapshot_Task, 'duration_secs': 0.977597} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.330595] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 836.341882] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.342907] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68e96fc3-4b0d-46cd-be4f-26f2fb7e4d8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.358266] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "38839949-c717-4f0b-97a7-108d87417b88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.358385] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "38839949-c717-4f0b-97a7-108d87417b88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.358589] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "38839949-c717-4f0b-97a7-108d87417b88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.358779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "38839949-c717-4f0b-97a7-108d87417b88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.358946] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "38839949-c717-4f0b-97a7-108d87417b88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.361773] env[70020]: INFO nova.compute.manager [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Terminating instance [ 836.366594] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 836.366594] env[70020]: value = "task-3618288" [ 836.366594] env[70020]: _type = "Task" [ 836.366594] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.376051] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618288, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.589147] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618286, 'name': Rename_Task, 'duration_secs': 0.301532} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.589147] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.589147] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9acf8a14-35fb-47d3-bd1d-96a2ea763895 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.591988] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Successfully created port: a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.595441] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 836.595441] env[70020]: value = "task-3618289" [ 836.595441] env[70020]: _type = "Task" [ 836.595441] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.608660] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618289, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.670336] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618287, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.758434] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 836.844195] env[70020]: WARNING nova.compute.manager [None req-293a3a76-4aff-4d7b-9854-f3c0cfec8bb2 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Image not found during snapshot: nova.exception.ImageNotFound: Image f911f098-6c4e-4434-bbd1-693555fb5d56 could not be found. [ 836.867837] env[70020]: DEBUG nova.compute.manager [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 836.868221] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.869781] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a595687-c874-4b7e-9c8a-3b7ec6d3f1e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.884254] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.886994] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f7d126e-0728-41f8-bf23-1e49a80c3cf2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.888531] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618288, 'name': ReconfigVM_Task, 'duration_secs': 0.301968} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.889133] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfigured VM instance instance-00000038 to attach disk [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.892295] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69e127b0-83a9-4289-9f21-e762bd474e59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.899969] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 836.899969] env[70020]: value = "task-3618291" [ 836.899969] env[70020]: _type = "Task" [ 836.899969] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.908875] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618291, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.959217] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.959476] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.959660] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Deleting the datastore file [datastore2] 38839949-c717-4f0b-97a7-108d87417b88 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.959965] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b57325b4-85cf-4ab5-bae9-69e4540248a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.966487] env[70020]: DEBUG oslo_vmware.api [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 836.966487] env[70020]: value = "task-3618292" [ 836.966487] env[70020]: _type = "Task" [ 836.966487] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.974557] env[70020]: DEBUG oslo_vmware.api [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618292, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.105048] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618289, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.180745] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618287, 'name': Destroy_Task, 'duration_secs': 0.925733} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.180745] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Destroyed the VM [ 837.180745] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 837.183464] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8f0e73bf-0e76-4b9d-bce8-1f83da57a934 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.190441] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 837.190441] env[70020]: value = "task-3618293" [ 837.190441] env[70020]: _type = "Task" [ 837.190441] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.201629] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618293, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.270097] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.270376] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.270592] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.270898] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.270976] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.275359] env[70020]: INFO nova.compute.manager [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Terminating instance [ 837.302287] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1e92c4-d69c-4e57-84f7-f9ae577baa20 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.311359] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338bf2d3-73f5-40f5-b413-c179aa96f95d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.345356] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85452b7e-2e03-4383-950d-a7aa13ebe879 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.356490] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d421d4-426b-4e16-bd2d-bf8be03b88de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.367427] env[70020]: DEBUG nova.compute.provider_tree [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.410112] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618291, 'name': Rename_Task, 'duration_secs': 0.158617} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.410207] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.410471] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d59ecece-45ec-4db3-8e7f-ad9530bcf974 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.417108] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 837.417108] env[70020]: value = "task-3618294" [ 837.417108] env[70020]: _type = "Task" [ 837.417108] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.424587] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.476218] env[70020]: DEBUG oslo_vmware.api [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618292, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29275} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.476567] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.476773] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.476973] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.477127] env[70020]: INFO nova.compute.manager [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Took 0.61 seconds to destroy the instance on the hypervisor. [ 837.477461] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.477723] env[70020]: DEBUG nova.compute.manager [-] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 837.477827] env[70020]: DEBUG nova.network.neutron [-] [instance: 38839949-c717-4f0b-97a7-108d87417b88] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.608913] env[70020]: DEBUG oslo_vmware.api [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618289, 'name': PowerOnVM_Task, 'duration_secs': 0.665832} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.609355] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.609643] env[70020]: INFO nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Took 9.54 seconds to spawn the instance on the hypervisor. 
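The entries around this point repeatedly show the driver's task-polling pattern: oslo_vmware.api logs "Waiting for the task" (wait_for_task, api.py:397), then "progress is N%" on each poll (_poll_task, api.py:434), and finally "completed successfully" with a duration_secs (api.py:444), as with task-3618289 (PowerOnVM_Task, 0.665832s) and task-3618292 (DeleteDatastoreFile_Task, 0.29275s) above. A minimal sketch of that poll-until-done loop follows; it deliberately avoids the real oslo.vmware API, so the poll_task callable, the TaskInfo fields, and the state strings are illustrative assumptions for this sketch only, not Nova's implementation.

import time
from dataclasses import dataclass

# Illustrative stand-ins: the real driver goes through oslo.vmware's session-level
# task waiting; TaskInfo and poll_task here are assumptions made for this sketch.
@dataclass
class TaskInfo:
    state: str       # assumed values: "running", "success", "error"
    progress: int    # percent complete, as in "progress is 66%"
    error: str = ""

def wait_for_task(poll_task, task_id, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes, mirroring the logged sequence:
    'Waiting for the task' -> 'progress is N%' -> 'completed successfully'."""
    start = time.monotonic()
    while True:
        info = poll_task(task_id)            # one round trip per poll, like each _poll_task line
        if info.state == "success":
            return time.monotonic() - start  # analogous to duration_secs in the log
        if info.state == "error":
            raise RuntimeError(f"{task_id} failed: {info.error}")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"{task_id} stuck at {info.progress}% after {timeout}s")
        time.sleep(interval)

A caller would supply a poll_task that performs the actual property retrieval for the task object; the loop's return value plays the same role as the duration_secs recorded when a task completes in the entries above.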
[ 837.609895] env[70020]: DEBUG nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.610936] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7a90ad-657c-4881-bbd3-012787d1a78f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.702782] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618293, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.774230] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.779408] env[70020]: DEBUG nova.compute.manager [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 837.779681] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.780725] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d7d260-8e35-4623-b010-a46b1b57548b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.791249] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.791816] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f2e382e-a822-4725-8f66-457623dd1b73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.796353] env[70020]: DEBUG nova.compute.manager [req-ec799032-df01-4fe9-a146-4b08000e74d5 req-851d9e7c-36c5-4e35-be77-3c6163114282 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Received event network-vif-deleted-43c0cb8b-c829-4fa8-908c-527551c10fb9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.796652] env[70020]: INFO nova.compute.manager [req-ec799032-df01-4fe9-a146-4b08000e74d5 req-851d9e7c-36c5-4e35-be77-3c6163114282 service nova] [instance: 
38839949-c717-4f0b-97a7-108d87417b88] Neutron deleted interface 43c0cb8b-c829-4fa8-908c-527551c10fb9; detaching it from the instance and deleting it from the info cache [ 837.796967] env[70020]: DEBUG nova.network.neutron [req-ec799032-df01-4fe9-a146-4b08000e74d5 req-851d9e7c-36c5-4e35-be77-3c6163114282 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.805441] env[70020]: DEBUG oslo_vmware.api [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 837.805441] env[70020]: value = "task-3618295" [ 837.805441] env[70020]: _type = "Task" [ 837.805441] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.812227] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.812521] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.812680] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.812892] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.813051] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.813203] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.813426] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.813582] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 837.813745] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.813903] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.814194] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.815647] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45bd7e6-7bd2-4a4c-8904-a84584728fb1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.826933] env[70020]: DEBUG oslo_vmware.api [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618295, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.830300] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf78cda8-d864-4f93-be31-6dd882e75ac3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.870680] env[70020]: DEBUG nova.scheduler.client.report [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.928982] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618294, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.138120] env[70020]: INFO nova.compute.manager [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Took 39.02 seconds to build instance. [ 838.140580] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Successfully updated port: a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.204818] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618293, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.236228] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "8317f386-44d0-4b1b-8590-d0336fafac21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.236457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "8317f386-44d0-4b1b-8590-d0336fafac21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.268350] env[70020]: DEBUG nova.network.neutron [-] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.302958] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad946db0-2443-4496-b924-b0c151ed4a5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.312524] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b122fec6-afc0-4730-9d10-aa9e7d970f4a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.325865] env[70020]: DEBUG oslo_vmware.api [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618295, 'name': PowerOffVM_Task, 'duration_secs': 0.273777} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.326523] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 838.326707] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 838.326960] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c3ddb3c-f52b-41e4-b917-88c97dd9e49b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.351532] env[70020]: DEBUG nova.compute.manager [req-ec799032-df01-4fe9-a146-4b08000e74d5 req-851d9e7c-36c5-4e35-be77-3c6163114282 service nova] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Detach interface failed, port_id=43c0cb8b-c829-4fa8-908c-527551c10fb9, reason: Instance 38839949-c717-4f0b-97a7-108d87417b88 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 838.376549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.377048] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 838.379677] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.129s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.379891] env[70020]: DEBUG nova.objects.instance [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lazy-loading 'resources' on Instance uuid 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.427864] env[70020]: DEBUG oslo_vmware.api [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618294, 'name': PowerOnVM_Task, 'duration_secs': 0.588727} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.428167] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.428356] env[70020]: INFO nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Took 7.69 seconds to spawn the instance on the hypervisor. [ 838.428526] env[70020]: DEBUG nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.429309] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf527a26-ad23-49c2-8bce-7642cff33898 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.642187] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e50e2e69-ec0b-46da-8467-b195ed2cc89f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.196s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.643920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "refresh_cache-f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.648166] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "refresh_cache-f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.648368] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.704187] env[70020]: DEBUG oslo_vmware.api [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618293, 'name': RemoveSnapshot_Task, 'duration_secs': 1.513356} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.704472] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 838.704704] env[70020]: INFO nova.compute.manager [None req-1de3ea1b-2add-4677-9fe5-9c280acc2109 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Took 18.53 seconds to snapshot the instance on the hypervisor. [ 838.774795] env[70020]: INFO nova.compute.manager [-] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Took 1.30 seconds to deallocate network for instance. [ 838.884198] env[70020]: DEBUG nova.compute.utils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.887256] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 838.887475] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.891498] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 838.891706] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 838.891933] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleting the datastore file [datastore2] 29d41731-4ae2-4cc4-bfda-b7356922c8ff {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 838.893375] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b6f5fce-5935-43a9-a00e-df8337a408a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.900985] env[70020]: DEBUG oslo_vmware.api [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 
tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 838.900985] env[70020]: value = "task-3618297" [ 838.900985] env[70020]: _type = "Task" [ 838.900985] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.910347] env[70020]: DEBUG oslo_vmware.api [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618297, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.945318] env[70020]: INFO nova.compute.manager [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Took 37.90 seconds to build instance. [ 838.952984] env[70020]: DEBUG nova.policy [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f327ea11ea09400b912db630dafe6c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0986c957c04b40e2a8e252212abadb4c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 839.151551] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.195256] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.286140] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.308726] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Successfully created port: e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.395084] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 839.413515] env[70020]: DEBUG oslo_vmware.api [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201469} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.413746] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.413922] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.414121] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.414293] env[70020]: INFO nova.compute.manager [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Took 1.63 seconds to destroy the instance on the hypervisor. [ 839.414521] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.414699] env[70020]: DEBUG nova.compute.manager [-] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.414789] env[70020]: DEBUG nova.network.neutron [-] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.451999] env[70020]: DEBUG oslo_concurrency.lockutils [None req-81666243-59cf-40f2-8b78-d20857eeec27 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.485s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.479724] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Updating instance_info_cache with network_info: [{"id": "a4cb005b-e1cb-4efe-9272-a7adbf42d5bc", "address": "fa:16:3e:a7:49:95", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4cb005b-e1", "ovs_interfaceid": "a4cb005b-e1cb-4efe-9272-a7adbf42d5bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.496509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8958bc32-f1b2-460e-bb29-5aa67c8825c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.507301] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded85629-7a37-4096-a539-850f70962617 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.543628] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44af06ab-4153-4bc2-b7a9-594c5fc39ff1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.551608] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-69e6c9b7-9849-4e92-82fb-537fbc0b7961 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.566399] env[70020]: DEBUG nova.compute.provider_tree [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.597926] env[70020]: DEBUG nova.compute.manager [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Received event network-changed-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.598147] env[70020]: DEBUG nova.compute.manager [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Refreshing instance network info cache due to event network-changed-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 839.598506] env[70020]: DEBUG oslo_concurrency.lockutils [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] Acquiring lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.598506] env[70020]: DEBUG oslo_concurrency.lockutils [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] Acquired lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.598686] env[70020]: DEBUG nova.network.neutron [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Refreshing network info cache for port 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.677608] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.874977] env[70020]: DEBUG nova.compute.manager [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Received event network-vif-plugged-a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.875323] env[70020]: DEBUG oslo_concurrency.lockutils [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] Acquiring lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.875584] env[70020]: DEBUG oslo_concurrency.lockutils 
[req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.875767] env[70020]: DEBUG oslo_concurrency.lockutils [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.875947] env[70020]: DEBUG nova.compute.manager [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] No waiting events found dispatching network-vif-plugged-a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 839.876133] env[70020]: WARNING nova.compute.manager [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Received unexpected event network-vif-plugged-a4cb005b-e1cb-4efe-9272-a7adbf42d5bc for instance with vm_state building and task_state spawning. [ 839.876299] env[70020]: DEBUG nova.compute.manager [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Received event network-changed-a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.876461] env[70020]: DEBUG nova.compute.manager [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Refreshing instance network info cache due to event network-changed-a4cb005b-e1cb-4efe-9272-a7adbf42d5bc. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 839.876964] env[70020]: DEBUG oslo_concurrency.lockutils [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] Acquiring lock "refresh_cache-f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.954531] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.984198] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "refresh_cache-f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.984603] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Instance network_info: |[{"id": "a4cb005b-e1cb-4efe-9272-a7adbf42d5bc", "address": "fa:16:3e:a7:49:95", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4cb005b-e1", "ovs_interfaceid": "a4cb005b-e1cb-4efe-9272-a7adbf42d5bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 839.985315] env[70020]: DEBUG oslo_concurrency.lockutils [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] Acquired lock "refresh_cache-f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.985561] env[70020]: DEBUG nova.network.neutron [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Refreshing network info cache for port a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.986890] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:49:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96fdbb91-eb49-4dbf-b234-5b38503d7589', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4cb005b-e1cb-4efe-9272-a7adbf42d5bc', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.996271] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 
tempest-ListServersNegativeTestJSON-1728183933-project-member] Creating folder: Project (0986c957c04b40e2a8e252212abadb4c). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.000012] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9cdfe44-8653-41ea-950a-df7f52b56123 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.017558] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Created folder: Project (0986c957c04b40e2a8e252212abadb4c) in parent group-v721521. [ 840.017866] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Creating folder: Instances. Parent ref: group-v721691. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.018167] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b69e1b3b-68e6-4d89-95af-365523240969 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.027778] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Created folder: Instances in parent group-v721691. [ 840.028028] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 840.028220] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.028435] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcc25588-0ba3-426b-8ca9-9fb9fc967ceb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.054643] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.054643] env[70020]: value = "task-3618300" [ 840.054643] env[70020]: _type = "Task" [ 840.054643] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.064507] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618300, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.069555] env[70020]: DEBUG nova.scheduler.client.report [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.303841] env[70020]: DEBUG nova.network.neutron [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Updated VIF entry in instance network info cache for port a4cb005b-e1cb-4efe-9272-a7adbf42d5bc. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.304246] env[70020]: DEBUG nova.network.neutron [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Updating instance_info_cache with network_info: [{"id": "a4cb005b-e1cb-4efe-9272-a7adbf42d5bc", "address": "fa:16:3e:a7:49:95", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4cb005b-e1", "ovs_interfaceid": "a4cb005b-e1cb-4efe-9272-a7adbf42d5bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.305484] env[70020]: DEBUG nova.network.neutron [-] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.366638] env[70020]: DEBUG nova.network.neutron [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updated VIF entry in instance network info cache for port 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.367063] env[70020]: DEBUG nova.network.neutron [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updating instance_info_cache with network_info: [{"id": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "address": "fa:16:3e:7e:7f:6e", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5cbd43-e9", "ovs_interfaceid": "9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.404839] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 840.438100] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 840.438405] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.438558] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 840.438735] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.438877] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 840.439027] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 840.439235] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 840.439410] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 840.439575] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 840.439731] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 840.439896] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 840.440777] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecd595f-4cdd-4b82-a0ac-3362cd67e75d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.449017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0064c2-b79e-4820-a7a3-23d0cf909dbd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.478912] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.565159] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618300, 'name': CreateVM_Task, 'duration_secs': 0.353685} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.565352] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.566143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.566332] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.567032] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 840.567032] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab44cd09-36e1-48aa-95d5-49c52fbec9c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.571470] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 840.571470] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a5a2a0-5c47-bca2-5696-d99a772af51d" [ 840.571470] env[70020]: _type = "Task" [ 840.571470] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.581192] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.201s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.583191] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a5a2a0-5c47-bca2-5696-d99a772af51d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.583666] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.229s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.583893] env[70020]: DEBUG nova.objects.instance [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'resources' on Instance uuid c08166c5-2c31-4d40-a61c-c541924eb49c {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.598768] env[70020]: INFO nova.scheduler.client.report [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Deleted allocations for instance 1f95bfa8-bc97-4ed7-8c33-c00297430bf5 [ 840.808545] env[70020]: DEBUG oslo_concurrency.lockutils [req-bb138602-230e-4636-a8ec-61e14e3696d9 req-efe896fc-9d73-4822-9ffd-ccfb9480476d service nova] Releasing lock "refresh_cache-f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.810170] env[70020]: INFO nova.compute.manager [-] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Took 1.40 seconds to deallocate network for instance. [ 840.871146] env[70020]: DEBUG oslo_concurrency.lockutils [req-7c0ac0a4-8359-4072-8169-14bb8002d412 req-55ea6c42-7b67-450f-a67d-61aca994c6a8 service nova] Releasing lock "refresh_cache-5c216231-afc5-41df-a243-bb2a17c20bfe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.874639] env[70020]: INFO nova.compute.manager [None req-3a1fff17-b7a1-4e5a-b439-c3b87a06d95a tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Get console output [ 840.875328] env[70020]: WARNING nova.virt.vmwareapi.driver [None req-3a1fff17-b7a1-4e5a-b439-c3b87a06d95a tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] The console log is missing. Check your VSPC configuration [ 840.955263] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Successfully updated port: e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.083113] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a5a2a0-5c47-bca2-5696-d99a772af51d, 'name': SearchDatastore_Task, 'duration_secs': 0.01209} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.083771] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.083771] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.083906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.084127] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.084418] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.084711] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50dd68f8-c4c9-4466-a403-b775072a15db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.096992] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.097194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.097928] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e47618f9-5338-4a19-902b-0c5933a1b3a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.105188] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 841.105188] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52892c08-ba66-0094-9247-3de4979ec2da" [ 841.105188] env[70020]: _type = "Task" [ 841.105188] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.109097] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9c7b241b-b181-42d5-8145-f051dc8a0875 tempest-VolumesAdminNegativeTest-973826376 tempest-VolumesAdminNegativeTest-973826376-project-member] Lock "1f95bfa8-bc97-4ed7-8c33-c00297430bf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.930s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.117186] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52892c08-ba66-0094-9247-3de4979ec2da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.317271] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.375936] env[70020]: DEBUG nova.compute.manager [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 841.456665] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "refresh_cache-ef0d716a-080e-4167-bd34-b2c660b95c88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.456832] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "refresh_cache-ef0d716a-080e-4167-bd34-b2c660b95c88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.456990] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 
tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.616681] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52892c08-ba66-0094-9247-3de4979ec2da, 'name': SearchDatastore_Task, 'duration_secs': 0.034169} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.621341] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c048eb88-5766-47f2-a8e4-0ae261aa69c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.628840] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 841.628840] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b8e2f1-e83a-083e-4c2b-a4941ff6d419" [ 841.628840] env[70020]: _type = "Task" [ 841.628840] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.638641] env[70020]: DEBUG nova.compute.manager [req-28ce9771-0f60-4a2b-986c-a2ebc9a4f7dd req-bdfdf075-d148-4248-af67-c1e14eed9776 service nova] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Received event network-vif-deleted-f1554235-9a1a-4e1a-9f15-a47bfe87eddd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.642486] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b8e2f1-e83a-083e-4c2b-a4941ff6d419, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.677690] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b3c25-c73d-47ca-977c-640941078456 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.688012] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6897f0-2b6b-4684-bbbd-5832210425f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.723621] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecac5c44-3222-4a07-81e1-23ccdb1acd91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.731687] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80511a59-716f-485c-a797-c39ad5eb829b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.747464] env[70020]: DEBUG nova.compute.provider_tree [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.902391] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.952188] env[70020]: DEBUG nova.compute.manager [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Received event network-vif-plugged-e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.952423] env[70020]: DEBUG oslo_concurrency.lockutils [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] Acquiring lock "ef0d716a-080e-4167-bd34-b2c660b95c88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.952628] env[70020]: DEBUG oslo_concurrency.lockutils [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.952796] env[70020]: DEBUG oslo_concurrency.lockutils [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
841.952957] env[70020]: DEBUG nova.compute.manager [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] No waiting events found dispatching network-vif-plugged-e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 841.953409] env[70020]: WARNING nova.compute.manager [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Received unexpected event network-vif-plugged-e4568496-25b6-4661-bb65-1608ffd75212 for instance with vm_state building and task_state spawning. [ 841.953648] env[70020]: DEBUG nova.compute.manager [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Received event network-changed-e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.953844] env[70020]: DEBUG nova.compute.manager [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Refreshing instance network info cache due to event network-changed-e4568496-25b6-4661-bb65-1608ffd75212. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 841.954111] env[70020]: DEBUG oslo_concurrency.lockutils [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] Acquiring lock "refresh_cache-ef0d716a-080e-4167-bd34-b2c660b95c88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.976916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "bb4e4986-af2a-4832-9ec7-777bca863dce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.977166] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.977398] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "bb4e4986-af2a-4832-9ec7-777bca863dce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.977684] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.977803] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.979664] env[70020]: INFO nova.compute.manager [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Terminating instance [ 842.011844] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.140058] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b8e2f1-e83a-083e-4c2b-a4941ff6d419, 'name': SearchDatastore_Task, 'duration_secs': 0.011477} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.140372] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.140634] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1/f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.140902] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34b5b4e0-48f2-43b8-83d8-439f0bf4168f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.147594] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 842.147594] env[70020]: value = "task-3618301" [ 842.147594] env[70020]: _type = "Task" [ 842.147594] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.155439] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618301, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.243404] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Updating instance_info_cache with network_info: [{"id": "e4568496-25b6-4661-bb65-1608ffd75212", "address": "fa:16:3e:33:09:3a", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4568496-25", "ovs_interfaceid": "e4568496-25b6-4661-bb65-1608ffd75212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.251471] env[70020]: DEBUG nova.scheduler.client.report [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.483476] env[70020]: DEBUG nova.compute.manager [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 842.483833] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.484684] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0e05ed-b3e8-4d24-9b43-67b714b5c139 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.493606] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.493959] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-373f1137-7490-4b19-bf5c-4c1502eff12c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.501207] env[70020]: DEBUG oslo_vmware.api [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 842.501207] env[70020]: value = "task-3618302" [ 842.501207] env[70020]: _type = "Task" [ 842.501207] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.509879] env[70020]: DEBUG oslo_vmware.api [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3618302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.657547] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618301, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495321} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.657837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1/f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.658065] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.658319] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e4b482a-4120-4adf-b667-30fdf52b4732 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.670027] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 842.670027] env[70020]: value = "task-3618303" [ 842.670027] env[70020]: _type = "Task" [ 842.670027] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.679203] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618303, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.746121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "refresh_cache-ef0d716a-080e-4167-bd34-b2c660b95c88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.747033] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Instance network_info: |[{"id": "e4568496-25b6-4661-bb65-1608ffd75212", "address": "fa:16:3e:33:09:3a", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4568496-25", "ovs_interfaceid": "e4568496-25b6-4661-bb65-1608ffd75212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.747033] env[70020]: DEBUG oslo_concurrency.lockutils [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] Acquired lock "refresh_cache-ef0d716a-080e-4167-bd34-b2c660b95c88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.747033] env[70020]: DEBUG nova.network.neutron [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Refreshing network info cache for port e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.748294] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:09:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96fdbb91-eb49-4dbf-b234-5b38503d7589', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4568496-25b6-4661-bb65-1608ffd75212', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.759330] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62e18d13-1077-4165-9d6e-89b6eb072914 
tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.763835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.180s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.766153] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.768668] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.407s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.768918] env[70020]: DEBUG nova.objects.instance [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lazy-loading 'resources' on Instance uuid 36f15b0a-d57f-49d8-9510-1036e889a438 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.770808] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6589d5cf-275f-46d5-81cd-d55b52c54eb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.802905] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.802905] env[70020]: value = "task-3618304" [ 842.802905] env[70020]: _type = "Task" [ 842.802905] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.811574] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618304, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.812782] env[70020]: INFO nova.scheduler.client.report [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocations for instance c08166c5-2c31-4d40-a61c-c541924eb49c [ 843.012781] env[70020]: DEBUG oslo_vmware.api [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3618302, 'name': PowerOffVM_Task, 'duration_secs': 0.293254} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.013865] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.014139] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.014417] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb97ee8e-da22-4897-af74-37b514df7c90 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.092589] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.092804] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.092988] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Deleting the datastore file [datastore1] bb4e4986-af2a-4832-9ec7-777bca863dce {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.093376] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3d1c449-3984-4876-b182-bf0623e96fc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.100258] env[70020]: DEBUG oslo_vmware.api [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for the task: (returnval){ [ 843.100258] env[70020]: value = "task-3618306" [ 843.100258] env[70020]: _type = "Task" [ 843.100258] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.109053] env[70020]: DEBUG oslo_vmware.api [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3618306, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.137186] env[70020]: DEBUG nova.network.neutron [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Updated VIF entry in instance network info cache for port e4568496-25b6-4661-bb65-1608ffd75212. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.137556] env[70020]: DEBUG nova.network.neutron [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Updating instance_info_cache with network_info: [{"id": "e4568496-25b6-4661-bb65-1608ffd75212", "address": "fa:16:3e:33:09:3a", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4568496-25", "ovs_interfaceid": "e4568496-25b6-4661-bb65-1608ffd75212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.179928] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084225} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.180255] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.181138] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fa8c44-a1f3-4b58-892e-62db77155b33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.207925] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1/f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.208722] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0308cc87-94c6-43c0-8bce-62699b8576d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.237998] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 843.237998] env[70020]: value = "task-3618307" [ 843.237998] env[70020]: _type = "Task" [ 843.237998] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.248050] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618307, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.314153] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618304, 'name': CreateVM_Task, 'duration_secs': 0.452742} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.314287] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.315143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.318890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.318890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 843.318890] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e20cfdb-1b1b-4615-8a45-5465f4c30f0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.324709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fa634777-fa41-4b34-a6a5-0da2c1afa42e tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c08166c5-2c31-4d40-a61c-c541924eb49c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.872s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.329186] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 843.329186] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d01f41-a76d-d297-09c6-dfbc7aa065c9" [ 843.329186] env[70020]: _type = "Task" [ 843.329186] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.342040] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d01f41-a76d-d297-09c6-dfbc7aa065c9, 'name': SearchDatastore_Task, 'duration_secs': 0.010041} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.342439] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.342683] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.342920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.343171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.343417] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.343691] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9e93f82-7f0a-490f-ab04-29709c72f9dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.358894] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.359453] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.362759] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75e9c1c9-d6c4-4afb-83fb-4dc3ac60c018 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.370461] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 843.370461] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528a0d31-f148-d4ef-b7ea-6cb19c35cacd" [ 843.370461] env[70020]: _type = "Task" [ 843.370461] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.381051] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528a0d31-f148-d4ef-b7ea-6cb19c35cacd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.612934] env[70020]: DEBUG oslo_vmware.api [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Task: {'id': task-3618306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22502} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.616209] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.616209] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.616209] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.616611] env[70020]: INFO nova.compute.manager [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Took 1.13 seconds to destroy the instance on the hypervisor. [ 843.616611] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 843.617060] env[70020]: DEBUG nova.compute.manager [-] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 843.618163] env[70020]: DEBUG nova.network.neutron [-] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.641073] env[70020]: DEBUG oslo_concurrency.lockutils [req-75816b2e-bd39-41a5-b3ae-1ce0916fc339 req-7970c61e-9b0c-4d6c-84c9-1ac30c66423b service nova] Releasing lock "refresh_cache-ef0d716a-080e-4167-bd34-b2c660b95c88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.750767] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618307, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.882322] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528a0d31-f148-d4ef-b7ea-6cb19c35cacd, 'name': SearchDatastore_Task, 'duration_secs': 0.010234} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.884576] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1fec02-a337-4b81-9bcf-e7b3d31309d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.887298] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c380f74d-f812-440a-85fa-0733d45948ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.897593] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 843.897593] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52718022-0d23-aee3-18df-d8b1bff4381c" [ 843.897593] env[70020]: _type = "Task" [ 843.897593] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.899119] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65765724-84e1-4d33-b28d-677cdbca3ebd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.920180] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52718022-0d23-aee3-18df-d8b1bff4381c, 'name': SearchDatastore_Task, 'duration_secs': 0.017313} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.947851] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.947851] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ef0d716a-080e-4167-bd34-b2c660b95c88/ef0d716a-080e-4167-bd34-b2c660b95c88.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.947851] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac55ead9-0311-4e8a-a2fc-2997be7b2caa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.949206] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f56a5a-79a6-4e64-bb3c-063203381dea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.965023] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9116ea89-439c-42b5-b03f-a4c816f64bd9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.967162] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 843.967162] env[70020]: value = "task-3618308" [ 843.967162] env[70020]: _type = "Task" [ 843.967162] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.979674] env[70020]: DEBUG nova.compute.provider_tree [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.989657] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.256827] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618307, 'name': ReconfigVM_Task, 'duration_secs': 0.810405} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.256827] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Reconfigured VM instance instance-00000039 to attach disk [datastore1] f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1/f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.257555] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19c8d578-1722-4a8f-972f-adc9c3797a2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.268698] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 844.268698] env[70020]: value = "task-3618309" [ 844.268698] env[70020]: _type = "Task" [ 844.268698] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.280922] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618309, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.487529] env[70020]: DEBUG nova.scheduler.client.report [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.491663] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618308, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.572647] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ad0a96a-76fd-47d3-8106-aafa3e80c662 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "2e9f4ece-0203-4816-a045-447822207697" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.576054] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ad0a96a-76fd-47d3-8106-aafa3e80c662 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "2e9f4ece-0203-4816-a045-447822207697" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.691931] env[70020]: DEBUG nova.compute.manager [req-15709f6d-9aa0-4354-a18f-ccc3eb9f2a65 req-e80b6ab9-51ff-4430-85c1-34ea303a16dc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Received event network-vif-deleted-3af5d84e-e814-4689-aa70-e63d58041799 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.692688] env[70020]: INFO nova.compute.manager [req-15709f6d-9aa0-4354-a18f-ccc3eb9f2a65 req-e80b6ab9-51ff-4430-85c1-34ea303a16dc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Neutron deleted interface 3af5d84e-e814-4689-aa70-e63d58041799; detaching it from the instance and deleting it from the info cache [ 844.693024] env[70020]: DEBUG nova.network.neutron [req-15709f6d-9aa0-4354-a18f-ccc3eb9f2a65 req-e80b6ab9-51ff-4430-85c1-34ea303a16dc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.770374] env[70020]: DEBUG nova.network.neutron [-] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.784237] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618309, 'name': Rename_Task, 'duration_secs': 0.256492} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.784810] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.784810] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-068ea072-df3e-4b8b-85d1-472924c35c7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.793943] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 844.793943] env[70020]: value = "task-3618310" [ 844.793943] env[70020]: _type = "Task" [ 844.793943] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.803833] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618310, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.985608] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.82491} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.985608] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ef0d716a-080e-4167-bd34-b2c660b95c88/ef0d716a-080e-4167-bd34-b2c660b95c88.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.985878] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.986420] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24e797c8-76d6-4891-ae2d-154ae86abdf9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.993811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.225s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.996827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.794s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.997104] env[70020]: DEBUG nova.objects.instance [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 845.004726] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 845.004726] env[70020]: value = "task-3618311" [ 845.004726] env[70020]: _type = "Task" [ 845.004726] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.019126] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618311, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.034451] env[70020]: INFO nova.scheduler.client.report [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Deleted allocations for instance 36f15b0a-d57f-49d8-9510-1036e889a438 [ 845.197509] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d27cd281-0fea-4366-8144-83ceff328395 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.214649] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8877fc5-cc23-47e1-b126-db5b2e0ec296 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.274393] env[70020]: DEBUG nova.compute.manager [req-15709f6d-9aa0-4354-a18f-ccc3eb9f2a65 req-e80b6ab9-51ff-4430-85c1-34ea303a16dc service nova] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Detach interface failed, port_id=3af5d84e-e814-4689-aa70-e63d58041799, reason: Instance bb4e4986-af2a-4832-9ec7-777bca863dce could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 845.278264] env[70020]: INFO nova.compute.manager [-] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Took 1.66 seconds to deallocate network for instance. [ 845.307642] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618310, 'name': PowerOnVM_Task} progress is 78%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.530633] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.2334} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.532096] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.533178] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9809ae19-90aa-436f-96ec-42376363f25e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.555202] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97b038d7-5c97-459a-bfd3-232826b9f6ba tempest-ServerGroupTestJSON-297588656 tempest-ServerGroupTestJSON-297588656-project-member] Lock "36f15b0a-d57f-49d8-9510-1036e889a438" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.795s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.566795] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] ef0d716a-080e-4167-bd34-b2c660b95c88/ef0d716a-080e-4167-bd34-b2c660b95c88.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.567739] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f147c716-c3d7-4ef7-a9b2-aa9fec7c8247 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.593164] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 845.593164] env[70020]: value = "task-3618312" [ 845.593164] env[70020]: _type = "Task" [ 845.593164] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.604535] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618312, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.784998] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.808433] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618310, 'name': PowerOnVM_Task, 'duration_secs': 0.985809} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.809710] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.809710] env[70020]: INFO nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Took 8.03 seconds to spawn the instance on the hypervisor. [ 845.809710] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.810392] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0baf57-e59b-43ba-8f5f-faac193e156a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.018202] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77604d39-7c4b-482e-8de7-d98c527d4fd5 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.018606] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.240s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.027361] env[70020]: INFO nova.compute.claims [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.105115] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 
tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618312, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.166301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.166535] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.246664] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.247028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.247305] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.247562] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.247796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.250014] env[70020]: INFO nova.compute.manager 
[None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Terminating instance [ 846.331211] env[70020]: INFO nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Took 40.86 seconds to build instance. [ 846.604794] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618312, 'name': ReconfigVM_Task, 'duration_secs': 0.9201} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.606221] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Reconfigured VM instance instance-0000003a to attach disk [datastore2] ef0d716a-080e-4167-bd34-b2c660b95c88/ef0d716a-080e-4167-bd34-b2c660b95c88.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.606221] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a22d0237-072f-438c-869c-6f14f335d842 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.615160] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 846.615160] env[70020]: value = "task-3618313" [ 846.615160] env[70020]: _type = "Task" [ 846.615160] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.627718] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618313, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.755512] env[70020]: DEBUG nova.compute.manager [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 846.757552] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 846.757552] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdfde04-d915-4724-bfe6-33f143e6d1e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.768157] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.768306] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80eee54e-7f25-4b86-aa82-85cfb73498f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.779050] env[70020]: DEBUG oslo_vmware.api [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 846.779050] env[70020]: value = "task-3618314" [ 846.779050] env[70020]: _type = "Task" [ 846.779050] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.788421] env[70020]: DEBUG oslo_vmware.api [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618314, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.834807] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.602s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.126756] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618313, 'name': Rename_Task, 'duration_secs': 0.164668} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.129815] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.130356] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f57154d-6c32-4d7d-b045-231d515f165a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.140597] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 847.140597] env[70020]: value = "task-3618315" [ 847.140597] env[70020]: _type = "Task" [ 847.140597] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.156760] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.300809] env[70020]: DEBUG oslo_vmware.api [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618314, 'name': PowerOffVM_Task, 'duration_secs': 0.456876} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.304539] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.304764] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.307704] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e89e812c-13ed-4993-bced-5c82b8fc4988 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.341713] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.394152] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.394445] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.394686] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Deleting the datastore file [datastore2] 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.397061] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53d95d3b-0dcb-42a3-bd30-f5c3012e0e65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.403774] env[70020]: DEBUG oslo_vmware.api [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 847.403774] env[70020]: value = "task-3618317" [ 847.403774] env[70020]: _type = "Task" [ 847.403774] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.415456] env[70020]: DEBUG oslo_vmware.api [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618317, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.656065] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618315, 'name': PowerOnVM_Task} progress is 98%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.682237] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645aaed5-b49f-4ec2-a744-240d06f8ee26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.693490] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929ee1ce-76f6-4fb7-9b8e-0fbcf4b415ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.734633] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b424ef7f-eb86-4289-bf74-7e9576b4fde2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.744134] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1013bbb-1072-46f8-9a32-6df4c6032c23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.759539] env[70020]: DEBUG nova.compute.provider_tree [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.871446] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.917898] env[70020]: DEBUG oslo_vmware.api [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2028} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.917898] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 847.918089] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 847.918154] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 847.918320] env[70020]: INFO nova.compute.manager [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Took 1.16 seconds to destroy the instance on the hypervisor. [ 847.918784] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.918863] env[70020]: DEBUG nova.compute.manager [-] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 847.918909] env[70020]: DEBUG nova.network.neutron [-] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.157128] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618315, 'name': PowerOnVM_Task, 'duration_secs': 0.599902} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.157364] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.157567] env[70020]: INFO nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Took 7.75 seconds to spawn the instance on the hypervisor. 
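The spawn sequence traced above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session object: each vSphere task is started with invoke_api and then polled by wait_for_task, which emits the "Waiting for the task: ..." and "Task: {...} progress is N%" lines seen in this log. A minimal sketch of that polling pattern outside Nova follows; the vCenter host, credentials, and the way vm_ref is obtained are placeholders and assumptions, not values taken from this deployment.

```python
# Minimal sketch of the oslo.vmware task-polling pattern visible in the log
# above. Host, credentials, and the vm_ref lookup are placeholders only.
from oslo_vmware import api as vmware_api

# Establish a vSphere session (the log's VMwareAPISession._create_session);
# task_poll_interval controls how often wait_for_task polls running tasks.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org',           # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder user
    'secret',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# vm_ref is a VirtualMachine managed-object reference obtained elsewhere
# (e.g. from a PropertyCollector or SearchIndex query) -- elided here.
vm_ref = ...

# Start a vSphere task and block until it finishes; wait_for_task polls the
# task (logging progress as in the lines above) and raises if it fails.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the VM is powered on
```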
[ 848.157741] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.158613] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d726752-6085-4fe7-8ed8-7e85194d1df4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.263693] env[70020]: DEBUG nova.scheduler.client.report [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.519865] env[70020]: DEBUG nova.compute.manager [req-1993fbb4-2719-455d-9de0-4bcdea070e1b req-3d21a033-dba1-42f1-a854-67131e2907e1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Received event network-vif-deleted-7cbd6812-9369-466e-a269-def6f4b8ed8f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.520086] env[70020]: INFO nova.compute.manager [req-1993fbb4-2719-455d-9de0-4bcdea070e1b req-3d21a033-dba1-42f1-a854-67131e2907e1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Neutron deleted interface 7cbd6812-9369-466e-a269-def6f4b8ed8f; detaching it from the instance and deleting it from the info cache [ 848.520259] env[70020]: DEBUG nova.network.neutron [req-1993fbb4-2719-455d-9de0-4bcdea070e1b req-3d21a033-dba1-42f1-a854-67131e2907e1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.636472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.636707] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.685801] env[70020]: INFO nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 
tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Took 41.18 seconds to build instance. [ 848.770306] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.774019] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 848.774307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.614s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.777017] env[70020]: INFO nova.compute.claims [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.924764] env[70020]: DEBUG nova.network.neutron [-] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.024466] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25294f3f-9696-492f-b2c7-4a578e1067ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.039055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f5955d-191a-4c19-be27-060b7c9f9079 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.087029] env[70020]: DEBUG nova.compute.manager [req-1993fbb4-2719-455d-9de0-4bcdea070e1b req-3d21a033-dba1-42f1-a854-67131e2907e1 service nova] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Detach interface failed, port_id=7cbd6812-9369-466e-a269-def6f4b8ed8f, reason: Instance 08ce6bc8-30fe-4c63-80e1-26c84ae75702 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 849.089106] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.089106] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.189855] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.923s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.283897] env[70020]: DEBUG nova.compute.utils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.288235] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.288616] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.337468] env[70020]: DEBUG nova.policy [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f327ea11ea09400b912db630dafe6c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0986c957c04b40e2a8e252212abadb4c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.428037] env[70020]: INFO nova.compute.manager [-] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Took 1.51 seconds to deallocate network for instance. 
[ 849.439181] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "edef9245-4048-4ea4-90cc-ebed54498d88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.439375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "edef9245-4048-4ea4-90cc-ebed54498d88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.606358] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Successfully created port: 36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.679938] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "ff4e958d-0068-429f-af76-5e7d4dd147f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.680248] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "ff4e958d-0068-429f-af76-5e7d4dd147f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.690855] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 849.789042] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 849.940549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.218889] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.421011] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61e7f54-cb14-4cc1-8851-209d91dcffc8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.429283] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e61c2e8-64fb-4bc0-a0bf-a93d6fd68a78 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.467899] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1224a946-4818-4084-907a-cec165e3ca85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.478996] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b92068-0df9-4ff5-ae0a-e848235a5d12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.494323] env[70020]: DEBUG nova.compute.provider_tree [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.803629] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 850.834079] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 850.834345] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.834527] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.834676] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.834816] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.834958] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 850.835179] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 850.835348] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 850.835541] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 850.835705] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 850.835880] env[70020]: DEBUG nova.virt.hardware [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 850.836776] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d8e97d-e4ea-4459-b8ea-0a3e9bab1e0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.846175] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23f28bc-53c0-419b-864d-e590f189f805 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.998047] env[70020]: DEBUG nova.scheduler.client.report [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.062381] env[70020]: DEBUG nova.compute.manager [req-c1dd81d1-9e1c-41fc-98cb-accc02908d5d req-d56e8e09-ad8c-447d-8678-e359dd6e4da0 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Received event network-vif-plugged-36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.062724] env[70020]: DEBUG oslo_concurrency.lockutils [req-c1dd81d1-9e1c-41fc-98cb-accc02908d5d req-d56e8e09-ad8c-447d-8678-e359dd6e4da0 service nova] Acquiring lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.062938] env[70020]: DEBUG oslo_concurrency.lockutils [req-c1dd81d1-9e1c-41fc-98cb-accc02908d5d req-d56e8e09-ad8c-447d-8678-e359dd6e4da0 service nova] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
851.063123] env[70020]: DEBUG oslo_concurrency.lockutils [req-c1dd81d1-9e1c-41fc-98cb-accc02908d5d req-d56e8e09-ad8c-447d-8678-e359dd6e4da0 service nova] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.063284] env[70020]: DEBUG nova.compute.manager [req-c1dd81d1-9e1c-41fc-98cb-accc02908d5d req-d56e8e09-ad8c-447d-8678-e359dd6e4da0 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] No waiting events found dispatching network-vif-plugged-36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 851.063443] env[70020]: WARNING nova.compute.manager [req-c1dd81d1-9e1c-41fc-98cb-accc02908d5d req-d56e8e09-ad8c-447d-8678-e359dd6e4da0 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Received unexpected event network-vif-plugged-36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 for instance with vm_state building and task_state spawning. [ 851.156055] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Successfully updated port: 36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.503459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.504060] env[70020]: DEBUG nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 851.506758] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 31.631s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.506993] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.507092] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 851.507396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.579s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.507601] env[70020]: DEBUG nova.objects.instance [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lazy-loading 'resources' on Instance uuid b99195a6-866e-4142-970a-42a0564889ef {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.509758] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a87556-315b-42b2-b378-d00e12f02fcf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.522482] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5240bf61-2b40-4d43-a6ec-4a1419d30f35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.538019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d215e824-7740-49fd-afa1-58f592eaccf0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.547438] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd87f73c-3c03-4033-8ea2-fdfc00a42594 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.582585] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178999MB free_disk=75GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 851.582913] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.658391] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "refresh_cache-8bff6907-c2b0-4ad1-9298-b2d622d33fde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.658391] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "refresh_cache-8bff6907-c2b0-4ad1-9298-b2d622d33fde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.658515] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.013753] env[70020]: DEBUG nova.compute.utils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 852.015297] env[70020]: DEBUG nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 852.194292] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.349560] env[70020]: DEBUG nova.network.neutron [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Updating instance_info_cache with network_info: [{"id": "36d75dbd-aa9f-46d8-ad64-f95577fdb5f6", "address": "fa:16:3e:80:43:1e", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d75dbd-aa", "ovs_interfaceid": "36d75dbd-aa9f-46d8-ad64-f95577fdb5f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.522129] env[70020]: DEBUG nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 852.571291] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf73740-ec38-42a5-b5e6-408ea25ce685 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.582508] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa529752-71e1-4a42-bc69-ef015c2d4cf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.618559] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b444f1-294e-4f10-8787-1fad6cd287f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.627342] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bb6bcb-8e29-46e9-a517-954b7f1cea11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.642292] env[70020]: DEBUG nova.compute.provider_tree [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.855020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "refresh_cache-8bff6907-c2b0-4ad1-9298-b2d622d33fde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.855020] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Instance network_info: |[{"id": "36d75dbd-aa9f-46d8-ad64-f95577fdb5f6", "address": "fa:16:3e:80:43:1e", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d75dbd-aa", "ovs_interfaceid": "36d75dbd-aa9f-46d8-ad64-f95577fdb5f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 852.855020] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None 
req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:43:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96fdbb91-eb49-4dbf-b234-5b38503d7589', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36d75dbd-aa9f-46d8-ad64-f95577fdb5f6', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.861031] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.861477] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.861809] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8cb47141-4f3e-4f6e-a896-3ff3b688fcb7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.886020] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.886020] env[70020]: value = "task-3618318" [ 852.886020] env[70020]: _type = "Task" [ 852.886020] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.893926] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618318, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.098721] env[70020]: DEBUG nova.compute.manager [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Received event network-changed-36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 853.098942] env[70020]: DEBUG nova.compute.manager [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Refreshing instance network info cache due to event network-changed-36d75dbd-aa9f-46d8-ad64-f95577fdb5f6. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 853.099101] env[70020]: DEBUG oslo_concurrency.lockutils [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] Acquiring lock "refresh_cache-8bff6907-c2b0-4ad1-9298-b2d622d33fde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.099262] env[70020]: DEBUG oslo_concurrency.lockutils [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] Acquired lock "refresh_cache-8bff6907-c2b0-4ad1-9298-b2d622d33fde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.099428] env[70020]: DEBUG nova.network.neutron [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Refreshing network info cache for port 36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.145938] env[70020]: DEBUG nova.scheduler.client.report [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.396804] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618318, 'name': CreateVM_Task, 'duration_secs': 0.426386} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.396804] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.397602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.397782] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.398182] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 853.398458] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a42091b0-77d7-4873-928d-9d6438fdd6d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.404907] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 853.404907] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521c7290-5fa1-c06a-5967-db0c75135b72" [ 853.404907] env[70020]: _type = "Task" [ 853.404907] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.414490] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521c7290-5fa1-c06a-5967-db0c75135b72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.533677] env[70020]: DEBUG nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 853.556295] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 853.556608] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.556813] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.557018] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.557447] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.557645] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 853.557873] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 853.558061] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 853.558233] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 
tempest-ServerShowV254Test-361790264-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 853.558472] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 853.558672] env[70020]: DEBUG nova.virt.hardware [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 853.559647] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d58f6d-c518-4d90-954a-34beae54d239 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.569696] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f4a1be-7e14-45f2-82e1-b6fb7397b328 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.587915] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.593818] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Creating folder: Project (b5be191220ad46d4a627d82084faef39). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.594290] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9477fdcf-bfae-4109-a89c-aeaed2f3c5cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.608075] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Created folder: Project (b5be191220ad46d4a627d82084faef39) in parent group-v721521. [ 853.608244] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Creating folder: Instances. Parent ref: group-v721696. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.608535] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95a6d5cf-cee4-4727-b1dd-1600e30b1988 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.621945] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Created folder: Instances in parent group-v721696. 
[ 853.622037] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 853.622208] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.622500] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50784327-da1a-4619-994c-49bb5d111e64 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.642820] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.642820] env[70020]: value = "task-3618321" [ 853.642820] env[70020]: _type = "Task" [ 853.642820] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.655381] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.657359] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618321, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.658375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.285s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.660105] env[70020]: INFO nova.compute.claims [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.694737] env[70020]: INFO nova.scheduler.client.report [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Deleted allocations for instance b99195a6-866e-4142-970a-42a0564889ef [ 853.859055] env[70020]: DEBUG nova.network.neutron [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Updated VIF entry in instance network info cache for port 36d75dbd-aa9f-46d8-ad64-f95577fdb5f6. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.859446] env[70020]: DEBUG nova.network.neutron [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Updating instance_info_cache with network_info: [{"id": "36d75dbd-aa9f-46d8-ad64-f95577fdb5f6", "address": "fa:16:3e:80:43:1e", "network": {"id": "6de6b9b1-ad1b-4051-9e62-1e2b930a9a3c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-325447579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0986c957c04b40e2a8e252212abadb4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d75dbd-aa", "ovs_interfaceid": "36d75dbd-aa9f-46d8-ad64-f95577fdb5f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.916032] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521c7290-5fa1-c06a-5967-db0c75135b72, 'name': SearchDatastore_Task, 'duration_secs': 0.014526} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.916350] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.916580] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.916811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.916953] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.917147] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.917401] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98a76ea9-b9f4-4d01-b0a9-6a5e9cb87cd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.926578] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.926756] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.927476] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be2f655-5144-40c0-ab25-bc6b4ff193db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.933388] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 853.933388] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5281a30a-d904-1fa2-9777-fa705e2b2b9c" [ 853.933388] env[70020]: _type = "Task" [ 853.933388] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.941486] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5281a30a-d904-1fa2-9777-fa705e2b2b9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.156292] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618321, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.205317] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ec001563-4b07-487a-83d5-db05d72e8e1f tempest-TenantUsagesTestJSON-190205799 tempest-TenantUsagesTestJSON-190205799-project-member] Lock "b99195a6-866e-4142-970a-42a0564889ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.700s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.326604] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "3a4f2342-58e7-436b-a779-0fa093b52409" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.326872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "3a4f2342-58e7-436b-a779-0fa093b52409" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.362391] env[70020]: DEBUG oslo_concurrency.lockutils [req-4f380a8d-0af4-461a-995f-34f846b987c9 req-9eb48dd6-acb0-4034-b276-d723ece02c80 service nova] Releasing lock "refresh_cache-8bff6907-c2b0-4ad1-9298-b2d622d33fde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.444935] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': 
session[528c1535-3daa-a7b0-823d-982a96a72224]5281a30a-d904-1fa2-9777-fa705e2b2b9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.445762] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14e5ce69-b67f-42c6-8a78-7ef336b68557 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.451470] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 854.451470] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52534107-19c8-14f8-aae5-d17caf66b501" [ 854.451470] env[70020]: _type = "Task" [ 854.451470] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.460064] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52534107-19c8-14f8-aae5-d17caf66b501, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.656897] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618321, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.965292] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52534107-19c8-14f8-aae5-d17caf66b501, 'name': SearchDatastore_Task, 'duration_secs': 0.039485} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.968515] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.968866] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 8bff6907-c2b0-4ad1-9298-b2d622d33fde/8bff6907-c2b0-4ad1-9298-b2d622d33fde.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.969354] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a1883cc-dffb-4cf9-8d1c-3944193a7660 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.976972] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 854.976972] env[70020]: value = "task-3618322" [ 854.976972] env[70020]: _type = "Task" [ 854.976972] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.986987] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.159684] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618321, 'name': CreateVM_Task, 'duration_secs': 1.334349} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.160569] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.161169] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.161358] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.161745] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 855.162035] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3030d3ad-dae0-4de2-b4b9-ed6e1601a355 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.167716] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 855.167716] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b55ad1-2be5-a628-7533-8fd575d90360" [ 855.167716] env[70020]: _type = "Task" [ 855.167716] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.179666] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b55ad1-2be5-a628-7533-8fd575d90360, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.281200] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cdab61-9adc-4818-8392-07863ca9b896 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.287532] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5166b7e-b990-4fb3-832e-3453ad681b73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.321248] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed8b859-7136-4dfc-bc71-b5d6924a49ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.330917] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c28cfe9-ab6a-43f7-97d5-cf47150eef3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.346627] env[70020]: DEBUG nova.compute.provider_tree [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.488619] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.688518] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b55ad1-2be5-a628-7533-8fd575d90360, 'name': SearchDatastore_Task, 'duration_secs': 0.021365} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.688518] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.689563] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.690195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.690195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.690195] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.690981] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69c29264-ee30-4276-985f-0f6ca9b605b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.708072] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.708312] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.709172] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-554a504b-5362-45ad-a26b-83ec72db5703 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.718694] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 855.718694] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b55aed-b7ae-ccae-dec5-f476a8dc899e" [ 855.718694] env[70020]: _type = "Task" [ 855.718694] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.732747] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b55aed-b7ae-ccae-dec5-f476a8dc899e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.850391] env[70020]: DEBUG nova.scheduler.client.report [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.988684] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.953392} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.988963] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 8bff6907-c2b0-4ad1-9298-b2d622d33fde/8bff6907-c2b0-4ad1-9298-b2d622d33fde.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.989315] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.989614] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1bf508b-6bf6-49bb-97ab-8957fe07b9f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.996988] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 855.996988] env[70020]: value = "task-3618323" [ 855.996988] env[70020]: _type = "Task" [ 855.996988] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.007380] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.235021] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b55aed-b7ae-ccae-dec5-f476a8dc899e, 'name': SearchDatastore_Task, 'duration_secs': 0.051425} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.235021] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-621633b0-4f6d-46f4-ba9f-3215f91a7bac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.239795] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 856.239795] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a45939-a37c-1186-056b-6e140ae941a6" [ 856.239795] env[70020]: _type = "Task" [ 856.239795] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.251809] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a45939-a37c-1186-056b-6e140ae941a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.357023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.357549] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 856.360991] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.026s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.361200] env[70020]: DEBUG nova.objects.instance [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 856.511485] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.251826} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.511793] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.512802] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d708a25f-8c00-4308-9d74-e10321d3030d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.539412] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 8bff6907-c2b0-4ad1-9298-b2d622d33fde/8bff6907-c2b0-4ad1-9298-b2d622d33fde.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.539750] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8fe7fa8-958d-4430-ad0f-806744c0f5c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.563935] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 856.563935] env[70020]: value = "task-3618324" [ 856.563935] env[70020]: _type = "Task" [ 856.563935] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.574255] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618324, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.751249] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a45939-a37c-1186-056b-6e140ae941a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011266} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.751603] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.751873] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.752150] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-484abb86-1a6e-4082-9dd3-4659a4c6b8bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.761455] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 856.761455] env[70020]: value = "task-3618325" [ 856.761455] env[70020]: _type = "Task" [ 856.761455] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.773660] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.867086] env[70020]: DEBUG nova.compute.utils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 856.867086] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 856.867278] env[70020]: DEBUG nova.network.neutron [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 856.962481] env[70020]: DEBUG nova.policy [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0444367c31244f648085e3a9b99ba26e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c5ab2d47aff46f8ace55faca4eaf252', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 857.076429] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618324, 'name': ReconfigVM_Task, 'duration_secs': 0.322547} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.076739] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 8bff6907-c2b0-4ad1-9298-b2d622d33fde/8bff6907-c2b0-4ad1-9298-b2d622d33fde.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.077423] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-655348ff-9749-42b4-b02b-faf3a97fcfe0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.086490] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 857.086490] env[70020]: value = "task-3618326" [ 857.086490] env[70020]: _type = "Task" [ 857.086490] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.096917] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618326, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.279299] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618325, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.370193] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 857.374304] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b43da711-c06a-4f79-8c7b-dc51d244e4bb tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.375248] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.816s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.375457] env[70020]: DEBUG nova.objects.instance [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'resources' on Instance uuid f7a42358-f26a-4651-a929-d3836f050648 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.597920] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618326, 'name': Rename_Task, 'duration_secs': 0.240838} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.598229] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.598502] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ec421c3-a65e-40d9-b0d5-0d51c86dd450 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.606358] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 857.606358] env[70020]: value = "task-3618327" [ 857.606358] env[70020]: _type = "Task" [ 857.606358] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.615994] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618327, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.634442] env[70020]: DEBUG nova.network.neutron [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Successfully created port: 826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.775892] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518751} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.776351] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.776992] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.777410] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cb0029f-377d-41c3-a912-e2d88dcfc831 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.787734] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 857.787734] env[70020]: value = "task-3618328" [ 857.787734] env[70020]: _type = "Task" [ 857.787734] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.803502] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.122032] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618327, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.299539] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108892} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.301719] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.306553] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e139ab61-af52-466d-9e7a-efd2a543608d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.325868] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.328967] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a7ee495-5fb2-40e5-8bdb-c6a5dfc3cd93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.352775] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 858.352775] env[70020]: value = "task-3618329" [ 858.352775] env[70020]: _type = "Task" [ 858.352775] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.366613] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.385697] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 858.421748] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 858.421998] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.422167] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 858.422370] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.422523] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 858.422689] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 858.422913] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 858.423106] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 858.423294] 
env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 858.423448] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 858.423612] env[70020]: DEBUG nova.virt.hardware [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 858.424503] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9953b2a0-0a43-4e55-9a0f-304c86414e3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.437658] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabbd632-0720-4fd8-9744-a114ca1e3035 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.582167] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cef0043-a1d1-4b14-a420-0a433cb76499 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.593412] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3001fb72-08e4-4b8e-bfc1-fa1efdd7b864 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.631142] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7fc62a-0fc5-4b0d-b068-d058b04a1fc2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.642499] env[70020]: DEBUG oslo_vmware.api [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618327, 'name': PowerOnVM_Task, 'duration_secs': 0.59232} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.642499] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.642499] env[70020]: INFO nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Took 7.84 seconds to spawn the instance on the hypervisor. 
[ 858.642499] env[70020]: DEBUG nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.644106] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b76183-d88c-4548-b16f-e02e7eea84ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.646785] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7b4a74-1010-4e19-b262-ad3d588bf071 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.677552] env[70020]: DEBUG nova.compute.provider_tree [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.865061] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618329, 'name': ReconfigVM_Task, 'duration_secs': 0.483658} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.865368] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.866010] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c8d6202-1e2a-47e9-a63b-18fc303e9082 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.878116] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 858.878116] env[70020]: value = "task-3618330" [ 858.878116] env[70020]: _type = "Task" [ 858.878116] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.891613] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618330, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.182069] env[70020]: DEBUG nova.scheduler.client.report [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.188282] env[70020]: INFO nova.compute.manager [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Took 42.43 seconds to build instance. [ 859.361379] env[70020]: DEBUG nova.compute.manager [req-924f5452-35c6-4400-9b8e-e90bb66561a6 req-007060ab-9aed-4eed-9c9c-2f0eec55e14e service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Received event network-vif-plugged-826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.361682] env[70020]: DEBUG oslo_concurrency.lockutils [req-924f5452-35c6-4400-9b8e-e90bb66561a6 req-007060ab-9aed-4eed-9c9c-2f0eec55e14e service nova] Acquiring lock "ef85421b-b679-4f38-b052-5695baa2e405-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.362099] env[70020]: DEBUG oslo_concurrency.lockutils [req-924f5452-35c6-4400-9b8e-e90bb66561a6 req-007060ab-9aed-4eed-9c9c-2f0eec55e14e service nova] Lock "ef85421b-b679-4f38-b052-5695baa2e405-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.362099] env[70020]: DEBUG oslo_concurrency.lockutils [req-924f5452-35c6-4400-9b8e-e90bb66561a6 req-007060ab-9aed-4eed-9c9c-2f0eec55e14e service nova] Lock "ef85421b-b679-4f38-b052-5695baa2e405-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.362266] env[70020]: DEBUG nova.compute.manager [req-924f5452-35c6-4400-9b8e-e90bb66561a6 req-007060ab-9aed-4eed-9c9c-2f0eec55e14e service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] No waiting events found dispatching network-vif-plugged-826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 859.362981] env[70020]: WARNING nova.compute.manager [req-924f5452-35c6-4400-9b8e-e90bb66561a6 req-007060ab-9aed-4eed-9c9c-2f0eec55e14e service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Received unexpected event network-vif-plugged-826e6050-1881-4e29-a740-868fa0f44788 for instance with vm_state building and task_state spawning. 
[ 859.391797] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618330, 'name': Rename_Task, 'duration_secs': 0.161234} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.391797] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.391797] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25477015-c92d-4482-8864-d6f4d035c8ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.398541] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 859.398541] env[70020]: value = "task-3618331" [ 859.398541] env[70020]: _type = "Task" [ 859.398541] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.410802] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.529335] env[70020]: DEBUG nova.network.neutron [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Successfully updated port: 826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.689610] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.314s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.693612] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.378s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.695168] env[70020]: INFO nova.compute.claims [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.702396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-62e18d13-1077-4165-9d6e-89b6eb072914 tempest-ListServersNegativeTestJSON-1728183933 
tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.404s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.740249] env[70020]: INFO nova.scheduler.client.report [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance f7a42358-f26a-4651-a929-d3836f050648 [ 859.908691] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618331, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.034303] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "refresh_cache-ef85421b-b679-4f38-b052-5695baa2e405" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.034456] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired lock "refresh_cache-ef85421b-b679-4f38-b052-5695baa2e405" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.034637] env[70020]: DEBUG nova.network.neutron [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.209489] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 860.253736] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3ac9bd14-c8e8-48f5-b2cc-50d50b1b1414 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "f7a42358-f26a-4651-a929-d3836f050648" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.590s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.422575] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618331, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.722492] env[70020]: DEBUG nova.network.neutron [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.743975] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.916927] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.917286] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.917392] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.917572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.917762] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.919360] env[70020]: DEBUG oslo_vmware.api [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618331, 'name': PowerOnVM_Task, 'duration_secs': 1.239569} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.922280] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.922515] env[70020]: INFO nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Took 7.39 seconds to spawn the instance on the hypervisor. [ 860.922698] env[70020]: DEBUG nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 860.923411] env[70020]: INFO nova.compute.manager [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Terminating instance [ 860.925494] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71abb135-5e72-4145-b28e-a915636f0aa2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.945107] env[70020]: DEBUG nova.compute.manager [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 860.945386] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 860.947149] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86efd94-78f6-4ef3-b314-1b5d23d36239 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.962290] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 860.962673] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-428f6821-95f3-4292-a88a-30e38f324673 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.971909] env[70020]: DEBUG oslo_vmware.api [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 860.971909] env[70020]: value = "task-3618332" [ 860.971909] env[70020]: _type = "Task" [ 860.971909] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.984128] env[70020]: DEBUG oslo_vmware.api [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618332, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.048416] env[70020]: DEBUG nova.network.neutron [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Updating instance_info_cache with network_info: [{"id": "826e6050-1881-4e29-a740-868fa0f44788", "address": "fa:16:3e:0e:25:d2", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826e6050-18", "ovs_interfaceid": "826e6050-1881-4e29-a740-868fa0f44788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.419501] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4190ba-9131-463b-9c9b-d7c4bc607063 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.432288] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73463b78-edcc-4311-a2b8-86dbfc9c97d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.483247] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79a4883-c5a0-43eb-86fb-7743c967552b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.486454] env[70020]: INFO nova.compute.manager [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Took 42.36 seconds to build instance. [ 861.498664] env[70020]: DEBUG oslo_vmware.api [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618332, 'name': PowerOffVM_Task, 'duration_secs': 0.243836} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.500831] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.500831] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.500831] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ee50cf-bdd4-4e2f-990c-66adb0e633b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.504595] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d83627d-a072-4b56-a50d-0b16edfc8b95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.519151] env[70020]: DEBUG nova.compute.provider_tree [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.550468] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Releasing lock "refresh_cache-ef85421b-b679-4f38-b052-5695baa2e405" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.550468] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Instance network_info: |[{"id": "826e6050-1881-4e29-a740-868fa0f44788", "address": "fa:16:3e:0e:25:d2", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826e6050-18", "ovs_interfaceid": "826e6050-1881-4e29-a740-868fa0f44788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 861.550899] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:25:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '826e6050-1881-4e29-a740-868fa0f44788', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.560601] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating folder: Project (5c5ab2d47aff46f8ace55faca4eaf252). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.561043] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f500188-9407-4c98-a3b4-f2bf9c9a0920 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.576914] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 861.578052] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 861.578052] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleting the datastore file [datastore1] f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.579062] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35f7f635-502a-4194-bc91-a523d2645449 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.581243] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Created folder: Project (5c5ab2d47aff46f8ace55faca4eaf252) in parent group-v721521. [ 861.582040] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating folder: Instances. Parent ref: group-v721699. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.582040] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-354ea452-97b0-4560-b510-f5f7b39d5cda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.590930] env[70020]: DEBUG oslo_vmware.api [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 861.590930] env[70020]: value = "task-3618335" [ 861.590930] env[70020]: _type = "Task" [ 861.590930] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.597244] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Created folder: Instances in parent group-v721699. [ 861.597874] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 861.598568] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.598873] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3394c23c-cf0c-4ad1-8137-c2fdd3748bab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.617743] env[70020]: DEBUG oslo_vmware.api [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.624015] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.624015] env[70020]: value = "task-3618337" [ 861.624015] env[70020]: _type = "Task" [ 861.624015] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.632293] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618337, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.731148] env[70020]: DEBUG nova.compute.manager [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Received event network-changed-826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.731363] env[70020]: DEBUG nova.compute.manager [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Refreshing instance network info cache due to event network-changed-826e6050-1881-4e29-a740-868fa0f44788. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 861.731582] env[70020]: DEBUG oslo_concurrency.lockutils [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] Acquiring lock "refresh_cache-ef85421b-b679-4f38-b052-5695baa2e405" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.731723] env[70020]: DEBUG oslo_concurrency.lockutils [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] Acquired lock "refresh_cache-ef85421b-b679-4f38-b052-5695baa2e405" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.731875] env[70020]: DEBUG nova.network.neutron [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Refreshing network info cache for port 826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.865659] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "9dec24d6-af8a-41b9-920c-e4420fc69417" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.865955] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.993022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a03f255e-ee1d-4d92-a684-bbebff194864 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "24184767-92f7-48b3-bbad-16a596ececde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.381s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.022413] env[70020]: DEBUG nova.scheduler.client.report [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.104146] env[70020]: DEBUG oslo_vmware.api [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205842} completed 
successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.105342] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.105342] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.105342] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.105342] env[70020]: INFO nova.compute.manager [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Took 1.16 seconds to destroy the instance on the hypervisor. [ 862.105342] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 862.105342] env[70020]: DEBUG nova.compute.manager [-] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 862.105550] env[70020]: DEBUG nova.network.neutron [-] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.138521] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618337, 'name': CreateVM_Task, 'duration_secs': 0.397762} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.138700] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.139379] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.139547] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.139857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 862.140119] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42e26499-0a24-4ea3-bfe7-7524e81e7119 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.145919] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 862.145919] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5280e8dd-8679-141d-eaa6-ca980cf96654" [ 862.145919] env[70020]: _type = "Task" [ 862.145919] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.154660] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5280e8dd-8679-141d-eaa6-ca980cf96654, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.494236] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.494504] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.495151] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 862.528113] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.835s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.528653] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.534116] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.223s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.534116] env[70020]: INFO nova.compute.claims [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.551574] env[70020]: DEBUG nova.network.neutron [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Updated VIF entry in instance network info cache for port 826e6050-1881-4e29-a740-868fa0f44788. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.551907] env[70020]: DEBUG nova.network.neutron [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Updating instance_info_cache with network_info: [{"id": "826e6050-1881-4e29-a740-868fa0f44788", "address": "fa:16:3e:0e:25:d2", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826e6050-18", "ovs_interfaceid": "826e6050-1881-4e29-a740-868fa0f44788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.657726] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5280e8dd-8679-141d-eaa6-ca980cf96654, 'name': SearchDatastore_Task, 'duration_secs': 0.013268} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.658053] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.658292] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.658522] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.658666] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.658841] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.659132] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91389a7b-7638-4bb0-ad6d-6b61763ca11d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.669266] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.669522] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.670305] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b47d0f9-ee13-4af0-943e-7651c640c888 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.678170] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 862.678170] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521b3247-ae6d-dd49-ba0c-a8ed795c5389" [ 862.678170] env[70020]: _type = "Task" [ 862.678170] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.688344] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521b3247-ae6d-dd49-ba0c-a8ed795c5389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.711698] env[70020]: INFO nova.compute.manager [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Rebuilding instance [ 862.766041] env[70020]: DEBUG nova.compute.manager [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 862.767432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f3386e-ff86-4d19-bc0e-d26037743157 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.980007] env[70020]: DEBUG nova.network.neutron [-] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.016339] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.043020] env[70020]: DEBUG nova.compute.utils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.043669] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 863.043865] env[70020]: DEBUG nova.network.neutron [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.055366] env[70020]: DEBUG oslo_concurrency.lockutils [req-45b486f8-d2f1-4ba1-b560-8f46855dd8e5 req-a5e45285-a026-4112-94e0-f22bf1f7beb9 service nova] Releasing lock "refresh_cache-ef85421b-b679-4f38-b052-5695baa2e405" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.106656] env[70020]: DEBUG nova.policy [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0444367c31244f648085e3a9b99ba26e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c5ab2d47aff46f8ace55faca4eaf252', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.191561] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521b3247-ae6d-dd49-ba0c-a8ed795c5389, 'name': SearchDatastore_Task, 'duration_secs': 0.012883} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.192509] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cc28cb5-b24c-4167-83ab-d708717b67ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.198720] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 863.198720] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52925b03-7d0c-ed5a-e33a-acc8815ae673" [ 863.198720] env[70020]: _type = "Task" [ 863.198720] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.210724] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52925b03-7d0c-ed5a-e33a-acc8815ae673, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.403423] env[70020]: DEBUG nova.network.neutron [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Successfully created port: 089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.485675] env[70020]: INFO nova.compute.manager [-] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Took 1.38 seconds to deallocate network for instance. [ 863.553763] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.713182] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52925b03-7d0c-ed5a-e33a-acc8815ae673, 'name': SearchDatastore_Task, 'duration_secs': 0.011212} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.715985] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.716272] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ef85421b-b679-4f38-b052-5695baa2e405/ef85421b-b679-4f38-b052-5695baa2e405.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.716742] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c386c61b-a30f-4f1d-a187-140e6672fdce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.724617] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 863.724617] env[70020]: value = "task-3618338" [ 863.724617] env[70020]: _type = "Task" [ 863.724617] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.759565] env[70020]: DEBUG nova.compute.manager [req-eaa6e1d1-6d53-472e-9433-a499e3ed09e3 req-0e4081b4-d9ef-4f67-8d17-460bb82d2a17 service nova] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Received event network-vif-deleted-a4cb005b-e1cb-4efe-9272-a7adbf42d5bc {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 863.782609] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 863.782908] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1267be1f-047a-441d-8ebc-3d379d405748 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.791077] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 863.791077] env[70020]: value = "task-3618339" [ 863.791077] env[70020]: _type = "Task" [ 863.791077] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.806097] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618339, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.994597] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.167988] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6525f1f5-9460-456e-9c05-1c757700251a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.177127] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44fce80-b7ab-499a-9488-74ec1d803d3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.218811] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d43aec0-62bf-4a7f-896b-df5e582c062f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.233577] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10652adc-c460-40b9-a5e8-406c4c9f1c01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.241701] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618338, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456651} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.242276] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ef85421b-b679-4f38-b052-5695baa2e405/ef85421b-b679-4f38-b052-5695baa2e405.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.242566] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.242780] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b40a5935-61fd-49dd-aca5-b70d03be9b60 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.254157] env[70020]: DEBUG nova.compute.provider_tree [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.261257] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 864.261257] env[70020]: value = "task-3618340" [ 864.261257] env[70020]: _type = "Task" [ 864.261257] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.271646] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.301181] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618339, 'name': PowerOffVM_Task, 'duration_secs': 0.153844} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.301483] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 864.301804] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 864.302899] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa3ae55-5a64-4231-9419-2767a8560c3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.312491] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 864.312742] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b898c211-e16f-4578-9431-fdc384335757 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.339586] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 864.339842] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 864.340036] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Deleting the datastore file [datastore2] 24184767-92f7-48b3-bbad-16a596ececde {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 864.340327] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0cd2a29-450b-4b88-ad7d-44a55942dc71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.347255] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 864.347255] env[70020]: value = "task-3618342" [ 864.347255] env[70020]: _type = "Task" [ 864.347255] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.355900] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.568206] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.593869] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 864.594129] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.594289] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.594466] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.594611] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.594757] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
864.594959] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 864.595132] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 864.595298] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 864.595462] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 864.595618] env[70020]: DEBUG nova.virt.hardware [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 864.596518] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad60279-afad-4647-baa6-14cfa4d85487 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.605443] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a091a18-4c42-46b4-ae9a-ca80c33e3b2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.757094] env[70020]: DEBUG nova.scheduler.client.report [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.771307] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073649} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.771939] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.772185] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905499b0-0001-4b0e-b4a4-80b22f96970c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.795821] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] ef85421b-b679-4f38-b052-5695baa2e405/ef85421b-b679-4f38-b052-5695baa2e405.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.796578] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-960440ef-47fa-4a8f-b806-e7a4e918d049 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.820169] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 864.820169] env[70020]: value = "task-3618343" [ 864.820169] env[70020]: _type = "Task" [ 864.820169] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.828787] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618343, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.860118] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09563} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.860118] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.860118] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.860801] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.897188] env[70020]: DEBUG nova.network.neutron [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Successfully updated port: 089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.262589] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.731s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.263131] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 865.271458] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.799s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.271458] env[70020]: DEBUG nova.objects.instance [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lazy-loading 'resources' on Instance uuid 55c20886-ae10-4326-a9de-f8577f320a99 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.330757] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618343, 'name': ReconfigVM_Task, 'duration_secs': 0.320046} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.331041] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Reconfigured VM instance instance-0000003d to attach disk [datastore1] ef85421b-b679-4f38-b052-5695baa2e405/ef85421b-b679-4f38-b052-5695baa2e405.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.331674] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dce504bf-5a28-4548-b910-571afeda7a79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.340205] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 865.340205] env[70020]: value = "task-3618344" [ 865.340205] env[70020]: _type = "Task" [ 865.340205] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.350065] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618344, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.399206] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "refresh_cache-4335f92a-897a-4779-be70-4f0754a66d53" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.399383] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired lock "refresh_cache-4335f92a-897a-4779-be70-4f0754a66d53" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.399543] env[70020]: DEBUG nova.network.neutron [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.775023] env[70020]: DEBUG nova.compute.utils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 865.776400] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 865.776579] env[70020]: DEBUG nova.network.neutron [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 865.788724] env[70020]: DEBUG nova.compute.manager [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Received event network-vif-plugged-089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.788947] env[70020]: DEBUG oslo_concurrency.lockutils [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] Acquiring lock "4335f92a-897a-4779-be70-4f0754a66d53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.789201] env[70020]: DEBUG oslo_concurrency.lockutils [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] Lock "4335f92a-897a-4779-be70-4f0754a66d53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.789398] env[70020]: DEBUG oslo_concurrency.lockutils [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] Lock "4335f92a-897a-4779-be70-4f0754a66d53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.789591] env[70020]: DEBUG nova.compute.manager [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] No waiting events found dispatching network-vif-plugged-089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 865.790084] env[70020]: WARNING nova.compute.manager [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Received unexpected event network-vif-plugged-089cfbb7-4a17-4371-949f-06f761b4c32b for instance with vm_state building and task_state spawning. [ 865.790084] env[70020]: DEBUG nova.compute.manager [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Received event network-changed-089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.790178] env[70020]: DEBUG nova.compute.manager [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Refreshing instance network info cache due to event network-changed-089cfbb7-4a17-4371-949f-06f761b4c32b. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 865.790324] env[70020]: DEBUG oslo_concurrency.lockutils [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] Acquiring lock "refresh_cache-4335f92a-897a-4779-be70-4f0754a66d53" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.820026] env[70020]: DEBUG nova.policy [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71a1dc8214b042b28a551bfd8444e0c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2a0a96b236e4a7c8f6878d0becfc66b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.856143] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618344, 'name': Rename_Task, 'duration_secs': 0.192175} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.856284] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.856538] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89f31df8-c859-4e41-bd46-2d74c4903acb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.865655] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 865.865655] env[70020]: value = "task-3618345" [ 865.865655] env[70020]: _type = "Task" [ 865.865655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.877626] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618345, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.908037] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.908303] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.908459] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.908643] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.908788] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.908933] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.909195] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.909351] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.909568] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 
tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.909776] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.909960] env[70020]: DEBUG nova.virt.hardware [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.910898] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d388f2-2e00-4329-8ef7-b268236974a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.924556] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047c861e-d2d6-44bf-a330-bef3cd89cb5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.940822] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.946950] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.950248] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.950781] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01efd236-d540-499d-b1a1-d4ec02031f6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.964163] env[70020]: DEBUG nova.network.neutron [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.975755] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.975755] env[70020]: value = "task-3618346" [ 865.975755] env[70020]: _type = "Task" [ 865.975755] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.987383] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618346, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.191676] env[70020]: DEBUG nova.network.neutron [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Updating instance_info_cache with network_info: [{"id": "089cfbb7-4a17-4371-949f-06f761b4c32b", "address": "fa:16:3e:ea:7a:d1", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.16", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap089cfbb7-4a", "ovs_interfaceid": "089cfbb7-4a17-4371-949f-06f761b4c32b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.265305] env[70020]: DEBUG nova.network.neutron [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Successfully created port: d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.279895] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 866.377947] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618345, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.474509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22d718c-8239-4c44-9ad0-74a5caadc07c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.485896] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618346, 'name': CreateVM_Task, 'duration_secs': 0.320956} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.487667] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.488167] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.488323] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.488629] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 866.489586] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13278256-8dba-474d-b794-35ee58bdb886 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.492946] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87f99d1d-9e6a-4102-973e-88a9e3a1c3c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.498116] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 866.498116] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d22c4d-6b38-5e9d-4091-d9a0fd0c55c1" [ 866.498116] env[70020]: _type = "Task" [ 866.498116] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.525677] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8103baca-ecb3-48ff-aa9a-af36c3ac452b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.536717] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37684e1-4a3e-4e61-9f61-9189cfa5a746 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.540538] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d22c4d-6b38-5e9d-4091-d9a0fd0c55c1, 'name': SearchDatastore_Task, 'duration_secs': 0.013957} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.540836] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.541089] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.541324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.541472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.541646] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.542272] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45f6933e-d867-4147-a77a-6fdc6c561ec7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.554660] env[70020]: DEBUG nova.compute.provider_tree [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.565649] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.565856] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.566586] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580e6697-f6ac-4913-b5b1-f4adbe6fe2b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.572482] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 866.572482] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520d2e91-3342-0ccd-183a-d6fc0c7e4d00" [ 866.572482] env[70020]: _type = "Task" [ 866.572482] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.581364] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520d2e91-3342-0ccd-183a-d6fc0c7e4d00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.694155] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Releasing lock "refresh_cache-4335f92a-897a-4779-be70-4f0754a66d53" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.694438] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Instance network_info: |[{"id": "089cfbb7-4a17-4371-949f-06f761b4c32b", "address": "fa:16:3e:ea:7a:d1", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.16", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap089cfbb7-4a", "ovs_interfaceid": "089cfbb7-4a17-4371-949f-06f761b4c32b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 866.694714] env[70020]: DEBUG oslo_concurrency.lockutils [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] Acquired lock "refresh_cache-4335f92a-897a-4779-be70-4f0754a66d53" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.694887] 
env[70020]: DEBUG nova.network.neutron [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Refreshing network info cache for port 089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.696121] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:7a:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '089cfbb7-4a17-4371-949f-06f761b4c32b', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.704981] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.708482] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.709447] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c22a142-4bc6-4e9c-8883-bd340cfafc14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.733892] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.733892] env[70020]: value = "task-3618347" [ 866.733892] env[70020]: _type = "Task" [ 866.733892] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.743510] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618347, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.877063] env[70020]: DEBUG oslo_vmware.api [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618345, 'name': PowerOnVM_Task, 'duration_secs': 0.516911} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.877388] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.877598] env[70020]: INFO nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Took 8.49 seconds to spawn the instance on the hypervisor. 
[ 866.877780] env[70020]: DEBUG nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.878577] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88413882-4629-4346-867c-cf0e9f170f66 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.963073] env[70020]: DEBUG nova.network.neutron [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Updated VIF entry in instance network info cache for port 089cfbb7-4a17-4371-949f-06f761b4c32b. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.963429] env[70020]: DEBUG nova.network.neutron [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Updating instance_info_cache with network_info: [{"id": "089cfbb7-4a17-4371-949f-06f761b4c32b", "address": "fa:16:3e:ea:7a:d1", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.16", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap089cfbb7-4a", "ovs_interfaceid": "089cfbb7-4a17-4371-949f-06f761b4c32b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.058176] env[70020]: DEBUG nova.scheduler.client.report [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.088847] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520d2e91-3342-0ccd-183a-d6fc0c7e4d00, 'name': SearchDatastore_Task, 
'duration_secs': 0.033374} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.090132] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9755e710-32c7-4d74-8fbb-6aa4c63f97d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.098252] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 867.098252] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a0345-1c8f-b756-5364-b633baa0201b" [ 867.098252] env[70020]: _type = "Task" [ 867.098252] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.106540] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522a0345-1c8f-b756-5364-b633baa0201b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.192880] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "f16d60a4-5f80-4f41-b994-068de48775ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.192880] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "f16d60a4-5f80-4f41-b994-068de48775ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.193088] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "f16d60a4-5f80-4f41-b994-068de48775ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.193569] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "f16d60a4-5f80-4f41-b994-068de48775ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.193569] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock 
"f16d60a4-5f80-4f41-b994-068de48775ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.196515] env[70020]: INFO nova.compute.manager [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Terminating instance [ 867.246028] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618347, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.289881] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 867.317142] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.318030] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.318030] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.318030] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.318030] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.318030] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee 
tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.318323] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.318371] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.318536] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.318695] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.318877] env[70020]: DEBUG nova.virt.hardware [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.320052] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012d451d-b263-472c-9800-c65b2a228de2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.331426] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd345ab1-56ef-44ad-9ac1-1a1a957b1761 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.398609] env[70020]: INFO nova.compute.manager [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Took 45.04 seconds to build instance. 
[ 867.466297] env[70020]: DEBUG oslo_concurrency.lockutils [req-8b59cd1c-f1ac-4ee9-a810-32df860ff6b8 req-4b9e8254-b348-4bce-a04f-44880a63a78f service nova] Releasing lock "refresh_cache-4335f92a-897a-4779-be70-4f0754a66d53" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.563281] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.294s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.565696] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.115s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.567207] env[70020]: INFO nova.compute.claims [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.588331] env[70020]: INFO nova.scheduler.client.report [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted allocations for instance 55c20886-ae10-4326-a9de-f8577f320a99 [ 867.609343] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522a0345-1c8f-b756-5364-b633baa0201b, 'name': SearchDatastore_Task, 'duration_secs': 0.016511} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.610291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.610596] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.610866] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff181be8-8955-4238-87a8-b1a228f0f6b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.619287] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 867.619287] env[70020]: value = "task-3618348" [ 867.619287] env[70020]: _type = "Task" [ 867.619287] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.628851] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.700927] env[70020]: DEBUG nova.compute.manager [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 867.702188] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.702188] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8683f7c5-0379-43f0-a2df-fa3070fd155b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.714714] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.714714] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99bd3b00-e525-4631-a9fa-cf5c6ca140e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.720443] env[70020]: DEBUG oslo_vmware.api [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 867.720443] env[70020]: value = "task-3618349" [ 867.720443] env[70020]: _type = "Task" [ 867.720443] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.731723] env[70020]: DEBUG oslo_vmware.api [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.742500] env[70020]: DEBUG nova.network.neutron [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Successfully updated port: d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.749753] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618347, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.904158] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf99e76a-ff52-4a95-906e-6f83862c66cc tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "ef85421b-b679-4f38-b052-5695baa2e405" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.051s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.931905] env[70020]: DEBUG nova.compute.manager [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Received event network-vif-plugged-d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.932151] env[70020]: DEBUG oslo_concurrency.lockutils [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] Acquiring lock "1d9218db-05d8-4e33-837f-e9865946237f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.933103] env[70020]: DEBUG oslo_concurrency.lockutils [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] Lock "1d9218db-05d8-4e33-837f-e9865946237f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.933208] env[70020]: DEBUG oslo_concurrency.lockutils [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] Lock "1d9218db-05d8-4e33-837f-e9865946237f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.933339] env[70020]: DEBUG nova.compute.manager [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] No waiting events found dispatching network-vif-plugged-d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.936020] env[70020]: WARNING nova.compute.manager [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Received unexpected event network-vif-plugged-d219e46c-ef15-4dec-a495-31e92d46d0c0 for instance with vm_state building and task_state spawning. [ 867.936020] env[70020]: DEBUG nova.compute.manager [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Received event network-changed-d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.936020] env[70020]: DEBUG nova.compute.manager [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Refreshing instance network info cache due to event network-changed-d219e46c-ef15-4dec-a495-31e92d46d0c0. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 867.936020] env[70020]: DEBUG oslo_concurrency.lockutils [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] Acquiring lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.936020] env[70020]: DEBUG oslo_concurrency.lockutils [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] Acquired lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.936020] env[70020]: DEBUG nova.network.neutron [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Refreshing network info cache for port d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.097287] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e267675c-7372-4310-95a1-a344fb35d41b tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "55c20886-ae10-4326-a9de-f8577f320a99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.592s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.130156] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.232659] env[70020]: DEBUG oslo_vmware.api [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618349, 'name': PowerOffVM_Task, 'duration_secs': 0.24148} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.232918] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.233095] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.233350] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aed5618e-e4f5-4bb1-8768-d8950b7960ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.244282] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618347, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.249916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.304720] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.305071] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.305260] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Deleting the datastore file [datastore1] f16d60a4-5f80-4f41-b994-068de48775ad {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.305558] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43c78b5e-57e8-4098-9517-e53cf8c5c859 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.313797] env[70020]: DEBUG oslo_vmware.api [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for the task: (returnval){ [ 868.313797] env[70020]: value = "task-3618351" [ 868.313797] env[70020]: _type = "Task" [ 868.313797] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.323439] env[70020]: DEBUG oslo_vmware.api [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.404040] env[70020]: DEBUG nova.compute.manager [None req-5ad0a96a-76fd-47d3-8106-aafa3e80c662 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 2e9f4ece-0203-4816-a045-447822207697] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 868.487421] env[70020]: DEBUG nova.network.neutron [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.587714] env[70020]: DEBUG nova.network.neutron [req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.635936] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618348, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.749589] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618347, 'name': CreateVM_Task, 'duration_secs': 1.726737} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.749589] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.749969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.750063] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.750390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.750685] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e51e6ff-50c5-4250-b25a-ddf52a5b8b80 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.757802] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 868.757802] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c29725-8a72-2cd0-f5ad-97a77a4357d4" [ 868.757802] env[70020]: _type = "Task" [ 868.757802] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.770992] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c29725-8a72-2cd0-f5ad-97a77a4357d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.828302] env[70020]: DEBUG oslo_vmware.api [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Task: {'id': task-3618351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23407} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.828302] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.828302] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.828598] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 868.828598] env[70020]: INFO nova.compute.manager [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Took 1.13 seconds to destroy the instance on the hypervisor. [ 868.828840] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 868.829136] env[70020]: DEBUG nova.compute.manager [-] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 868.829230] env[70020]: DEBUG nova.network.neutron [-] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 868.908397] env[70020]: DEBUG nova.compute.manager [None req-5ad0a96a-76fd-47d3-8106-aafa3e80c662 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 2e9f4ece-0203-4816-a045-447822207697] Instance disappeared before build. {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 868.963529] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.964114] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.964114] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.964250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.964384] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.966712] env[70020]: INFO nova.compute.manager [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Terminating instance [ 869.090170] env[70020]: DEBUG oslo_concurrency.lockutils 
[req-014ed313-4e4a-45bc-8931-b09a4d7da39a req-9cfa6a70-2bd4-482c-a998-6a39ccde5698 service nova] Releasing lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.090801] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.091013] env[70020]: DEBUG nova.network.neutron [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.133215] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618348, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.250984} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.136296] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.136574] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.138321] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84bf3cee-21cc-40f4-acb9-dc7cf216b25f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.146126] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 869.146126] env[70020]: value = "task-3618352" [ 869.146126] env[70020]: _type = "Task" [ 869.146126] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.160760] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618352, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.238796] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cfbd74-2ea4-4b6a-bee3-b8b08e4d3766 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.248011] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6856a7a-90b5-4008-a31f-4b428e57a0fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.292171] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f002313-f3d5-4b93-933b-4f0e669918b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.304499] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c5455d-8bed-4076-a476-9e0a546d6fa9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.309459] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c29725-8a72-2cd0-f5ad-97a77a4357d4, 'name': SearchDatastore_Task, 'duration_secs': 0.049236} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.309996] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.310435] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.310696] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.310847] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.311036] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 
tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.311921] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3988caa-3f96-4223-8433-8079913f18bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.322723] env[70020]: DEBUG nova.compute.provider_tree [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.325220] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.325418] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.326587] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e80c8d3-8d81-4d82-bf1d-ceea397bc063 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.333362] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 869.333362] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52829c98-4745-06d2-a72a-2b943e242e9c" [ 869.333362] env[70020]: _type = "Task" [ 869.333362] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.345942] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52829c98-4745-06d2-a72a-2b943e242e9c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.435811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ad0a96a-76fd-47d3-8106-aafa3e80c662 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "2e9f4ece-0203-4816-a045-447822207697" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.863s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.471221] env[70020]: DEBUG nova.compute.manager [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 869.471446] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.472339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5bb4cd-8c8f-465e-a755-a2d754d488b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.481027] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 869.481247] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-277de357-08d6-4f25-a41b-7cdce44251e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.489523] env[70020]: DEBUG oslo_vmware.api [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 869.489523] env[70020]: value = "task-3618353" [ 869.489523] env[70020]: _type = "Task" [ 869.489523] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.498267] env[70020]: DEBUG oslo_vmware.api [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.623178] env[70020]: DEBUG nova.network.neutron [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.657903] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129494} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.658184] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.658985] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dc7932-d01e-49d2-9986-8e9b8a5e8ad0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.680206] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.680515] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac8436fd-5450-4492-9995-95b300fd76ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.704178] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 869.704178] env[70020]: value = "task-3618354" [ 869.704178] env[70020]: _type = "Task" [ 869.704178] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.714492] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618354, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.782121] env[70020]: DEBUG nova.network.neutron [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [{"id": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "address": "fa:16:3e:7b:73:7b", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd219e46c-ef", "ovs_interfaceid": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.828274] env[70020]: DEBUG nova.scheduler.client.report [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.843265] env[70020]: DEBUG nova.network.neutron [-] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.852641] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52829c98-4745-06d2-a72a-2b943e242e9c, 'name': SearchDatastore_Task, 'duration_secs': 0.014307} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.853319] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-095fc430-2331-4930-90aa-1f2250e3d04c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.860329] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 869.860329] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b7f189-5eed-5039-7eee-4cfe805a81f1" [ 869.860329] env[70020]: _type = "Task" [ 869.860329] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.870767] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7f189-5eed-5039-7eee-4cfe805a81f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.937949] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 869.965683] env[70020]: DEBUG nova.compute.manager [req-c86cd0e6-9f73-4205-8a7c-cc776dd2c41c req-294b785b-5b0c-45ec-813c-5bc227ee5218 service nova] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Received event network-vif-deleted-320d056b-ab7e-455d-a9dc-f443a22fc563 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.000010] env[70020]: DEBUG oslo_vmware.api [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618353, 'name': PowerOffVM_Task, 'duration_secs': 0.447138} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.000275] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.000439] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.000685] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a31a623c-1cdd-4308-baf0-c9d2dcb7a485 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.072175] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.072407] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.072674] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleting the datastore file [datastore2] 0caa6acd-29d4-43ee-8b32-5149462dfc1c {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.072964] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7b5b356-a1ab-480a-89f2-c302aa742ab3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.080441] env[70020]: DEBUG oslo_vmware.api [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 870.080441] env[70020]: value = "task-3618356" [ 870.080441] env[70020]: _type = "Task" [ 870.080441] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.089596] env[70020]: DEBUG oslo_vmware.api [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.216725] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618354, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.284646] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.285019] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Instance network_info: |[{"id": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "address": "fa:16:3e:7b:73:7b", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd219e46c-ef", "ovs_interfaceid": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.285462] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:73:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd219e46c-ef15-4dec-a495-31e92d46d0c0', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.293153] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.293426] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.293635] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ef52dc9-52c5-4683-a98e-aff8f4e5e30d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.316032] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.316032] env[70020]: value = "task-3618357" [ 870.316032] env[70020]: _type = "Task" [ 870.316032] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.324412] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618357, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.334527] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.769s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.334910] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 870.338464] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.052s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.338464] env[70020]: DEBUG nova.objects.instance [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lazy-loading 'resources' on Instance uuid 38839949-c717-4f0b-97a7-108d87417b88 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.346346] env[70020]: INFO nova.compute.manager [-] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Took 1.52 seconds to deallocate network for instance. [ 870.372880] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7f189-5eed-5039-7eee-4cfe805a81f1, 'name': SearchDatastore_Task, 'duration_secs': 0.02062} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.373293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.373470] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 4335f92a-897a-4779-be70-4f0754a66d53/4335f92a-897a-4779-be70-4f0754a66d53.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.373742] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ffcef9d-791b-4495-8091-99f09fafbbf2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.384174] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 870.384174] env[70020]: value = "task-3618358" [ 870.384174] env[70020]: _type = "Task" [ 870.384174] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.395749] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.463144] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.592444] env[70020]: DEBUG oslo_vmware.api [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177421} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.592764] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.592960] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.593173] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.593358] env[70020]: INFO nova.compute.manager [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 870.593636] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.593883] env[70020]: DEBUG nova.compute.manager [-] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 870.593971] env[70020]: DEBUG nova.network.neutron [-] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.719115] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618354, 'name': ReconfigVM_Task, 'duration_secs': 0.568241} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.719587] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 24184767-92f7-48b3-bbad-16a596ececde/24184767-92f7-48b3-bbad-16a596ececde.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.720622] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03ad737d-8808-45dd-825c-f46df11f3780 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.732046] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 870.732046] env[70020]: value = "task-3618359" [ 870.732046] env[70020]: _type = "Task" [ 870.732046] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.747567] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618359, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.834509] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618357, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.841710] env[70020]: DEBUG nova.compute.utils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.847413] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 870.847639] env[70020]: DEBUG nova.network.neutron [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 870.854115] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.899775] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618358, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.912857] env[70020]: DEBUG nova.policy [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a96138e8caf4575854cbe0224b66030', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a63e8bb4fcd844f69aaeade95326a91b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.246787] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618359, 'name': Rename_Task, 'duration_secs': 0.429794} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.247107] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.247339] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f1487d8-613e-485f-aa23-53f9edff0212 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.255459] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 871.255459] env[70020]: value = "task-3618360" [ 871.255459] env[70020]: _type = "Task" [ 871.255459] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.274716] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618360, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.274716] env[70020]: DEBUG nova.network.neutron [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Successfully created port: 1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.330436] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618357, 'name': CreateVM_Task, 'duration_secs': 0.567359} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.333061] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.334549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.334827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.335226] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.335748] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dce050a-5f29-442e-a1bb-e8576b23e782 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.343331] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 871.343331] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525a7841-3ac7-341a-a00d-3ba2477d9c07" [ 871.343331] env[70020]: _type = "Task" [ 871.343331] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.347130] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 871.355620] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525a7841-3ac7-341a-a00d-3ba2477d9c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.401416] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59337} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.404868] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 4335f92a-897a-4779-be70-4f0754a66d53/4335f92a-897a-4779-be70-4f0754a66d53.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.405136] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.405816] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f35a960-4a14-43bb-9515-da21768bc9db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.415059] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 871.415059] env[70020]: value = "task-3618361" [ 871.415059] env[70020]: _type = "Task" [ 871.415059] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.431674] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618361, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.467382] env[70020]: DEBUG nova.network.neutron [-] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.531628] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfc9393-a862-4d08-bda6-3836a471210b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.541158] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7e5b09-3a7b-4dcd-95ca-4a0036a7e314 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.576550] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc7d802-6145-4a42-9557-b2d37bbf62c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.585462] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4d8e7e-6530-4162-9566-600bae3c9df2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.599621] env[70020]: DEBUG nova.compute.provider_tree [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.766832] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618360, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.855290] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525a7841-3ac7-341a-a00d-3ba2477d9c07, 'name': SearchDatastore_Task, 'duration_secs': 0.051465} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.855767] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.855993] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.856238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.856381] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.856555] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.856813] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db57ee64-64f6-46b5-a45b-32a29862b7eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.875482] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.875696] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.876390] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44add0b8-ec9b-43db-a4bd-87d9b413d8f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.881893] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 871.881893] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dd51b6-0131-1ed1-0f1b-6a0bae02ce0a" [ 871.881893] env[70020]: _type = "Task" [ 871.881893] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.892508] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dd51b6-0131-1ed1-0f1b-6a0bae02ce0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.931805] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074569} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.932099] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.932900] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29023238-dca9-4b97-81ab-e52e4d22e800 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.955842] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 4335f92a-897a-4779-be70-4f0754a66d53/4335f92a-897a-4779-be70-4f0754a66d53.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.956481] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8e4ac9e-87dc-4c06-a404-752ba6e87c49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.971298] env[70020]: INFO nova.compute.manager [-] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Took 1.38 seconds to deallocate network for instance. 
[ 871.983839] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 871.983839] env[70020]: value = "task-3618362" [ 871.983839] env[70020]: _type = "Task" [ 871.983839] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.992739] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618362, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.018847] env[70020]: DEBUG nova.compute.manager [req-57ae2477-6849-48f3-a8c2-1e668490d94e req-8b5bb8b1-8d12-463c-bb40-2dc59822698c service nova] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Received event network-vif-deleted-2e160e96-59d2-4391-adfe-0ebb379762cd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.103362] env[70020]: DEBUG nova.scheduler.client.report [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.270125] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618360, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.357577] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 872.385242] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 872.385489] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.385645] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 872.385827] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.385969] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 872.386127] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 872.386333] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 872.386488] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 872.386652] env[70020]: DEBUG nova.virt.hardware [None 
req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 872.386810] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 872.386976] env[70020]: DEBUG nova.virt.hardware [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 872.387903] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a727cc-30ff-4dd6-8b93-b66a31266656 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.404302] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc1e2cd-9de0-48c4-83f6-d01631c56bc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.409309] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dd51b6-0131-1ed1-0f1b-6a0bae02ce0a, 'name': SearchDatastore_Task, 'duration_secs': 0.043301} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.410027] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a24a2962-0109-452c-93e4-90de510e3036 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.424171] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 872.424171] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52619cd8-f5ab-b5cb-57e0-e97dbbddc9e7" [ 872.424171] env[70020]: _type = "Task" [ 872.424171] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.433019] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52619cd8-f5ab-b5cb-57e0-e97dbbddc9e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.478200] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.494056] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618362, 'name': ReconfigVM_Task, 'duration_secs': 0.43892} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.494329] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 4335f92a-897a-4779-be70-4f0754a66d53/4335f92a-897a-4779-be70-4f0754a66d53.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.494977] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6e71b6e-b0a9-41c7-b874-2d4bf4ac3fce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.502942] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 872.502942] env[70020]: value = "task-3618363" [ 872.502942] env[70020]: _type = "Task" [ 872.502942] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.513392] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618363, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.610306] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.273s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.614388] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.935s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.614766] env[70020]: INFO nova.compute.claims [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.637025] env[70020]: INFO nova.scheduler.client.report [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Deleted allocations for instance 38839949-c717-4f0b-97a7-108d87417b88 [ 872.767903] env[70020]: DEBUG oslo_vmware.api [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618360, 'name': PowerOnVM_Task, 'duration_secs': 1.102522} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.768221] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.768463] env[70020]: DEBUG nova.compute.manager [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.769322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6097beb8-744f-4459-bbb5-e7f9517884b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.793182] env[70020]: DEBUG nova.network.neutron [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Successfully updated port: 1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 872.935707] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52619cd8-f5ab-b5cb-57e0-e97dbbddc9e7, 'name': SearchDatastore_Task, 'duration_secs': 0.010482} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.935941] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.936236] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 1d9218db-05d8-4e33-837f-e9865946237f/1d9218db-05d8-4e33-837f-e9865946237f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.936499] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2449d0d7-69d5-482c-893e-11cac4b664b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.944405] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 872.944405] env[70020]: value = "task-3618364" [ 872.944405] env[70020]: _type = "Task" [ 872.944405] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.953631] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.017792] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618363, 'name': Rename_Task, 'duration_secs': 0.150068} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.018191] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.018509] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9644fee0-c807-4689-b3ff-729339f74ef2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.027330] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 873.027330] env[70020]: value = "task-3618365" [ 873.027330] env[70020]: _type = "Task" [ 873.027330] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.038049] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618365, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.149810] env[70020]: DEBUG oslo_concurrency.lockutils [None req-dcecbce0-59b1-48cc-9bcb-69e1ee765806 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "38839949-c717-4f0b-97a7-108d87417b88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.791s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.293475] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.295230] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "refresh_cache-d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.295366] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "refresh_cache-d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.295537] env[70020]: DEBUG nova.network.neutron [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.458618] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618364, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.538631] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618365, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.837145] env[70020]: DEBUG nova.network.neutron [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.956039] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563482} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.956304] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 1d9218db-05d8-4e33-837f-e9865946237f/1d9218db-05d8-4e33-837f-e9865946237f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.956558] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.956808] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5dc34983-52cd-4916-9f1e-fddf304f894c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.969210] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 873.969210] env[70020]: value = "task-3618366" [ 873.969210] env[70020]: _type = "Task" [ 873.969210] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.981579] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618366, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.004016] env[70020]: DEBUG nova.network.neutron [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Updating instance_info_cache with network_info: [{"id": "1217e13a-a21e-45bc-96a7-abdecc27de51", "address": "fa:16:3e:32:59:aa", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1217e13a-a2", "ovs_interfaceid": "1217e13a-a21e-45bc-96a7-abdecc27de51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.043227] env[70020]: DEBUG oslo_vmware.api [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618365, 'name': PowerOnVM_Task, 'duration_secs': 0.987199} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.043501] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.043743] env[70020]: INFO nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Took 9.48 seconds to spawn the instance on the hypervisor. 
[ 874.043925] env[70020]: DEBUG nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 874.044734] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1154c8-e6f6-4ac0-9a74-66283aab7b16 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.053694] env[70020]: DEBUG nova.compute.manager [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Received event network-vif-plugged-1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.054264] env[70020]: DEBUG oslo_concurrency.lockutils [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] Acquiring lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.054480] env[70020]: DEBUG oslo_concurrency.lockutils [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.054649] env[70020]: DEBUG oslo_concurrency.lockutils [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.054817] env[70020]: DEBUG nova.compute.manager [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] No waiting events found dispatching network-vif-plugged-1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.055014] env[70020]: WARNING nova.compute.manager [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Received unexpected event network-vif-plugged-1217e13a-a21e-45bc-96a7-abdecc27de51 for instance with vm_state building and task_state spawning. 
[ 874.055997] env[70020]: DEBUG nova.compute.manager [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Received event network-changed-1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.056198] env[70020]: DEBUG nova.compute.manager [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Refreshing instance network info cache due to event network-changed-1217e13a-a21e-45bc-96a7-abdecc27de51. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 874.056376] env[70020]: DEBUG oslo_concurrency.lockutils [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] Acquiring lock "refresh_cache-d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.137178] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "16c45b86-317a-4d0c-a402-51c85af37a5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.137413] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.137610] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "16c45b86-317a-4d0c-a402-51c85af37a5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.137793] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.137958] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.139891] env[70020]: INFO nova.compute.manager [None req-b327bb9d-0787-430c-b40d-be2968095744 
tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Terminating instance [ 874.194248] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac7893a-914c-4009-b46e-125941ecda1c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.202010] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4581aba0-afcc-44f7-a4b7-34c09e071e5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.234921] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1521c0c-f1c4-4975-8812-fa9b5eeda5c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.243486] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650bfe68-bc63-4fcb-92ad-7b31082f65d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.257491] env[70020]: DEBUG nova.compute.provider_tree [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.407577] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "24184767-92f7-48b3-bbad-16a596ececde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.407940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "24184767-92f7-48b3-bbad-16a596ececde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.408097] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "24184767-92f7-48b3-bbad-16a596ececde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.408302] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "24184767-92f7-48b3-bbad-16a596ececde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.408441] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "24184767-92f7-48b3-bbad-16a596ececde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.411134] env[70020]: INFO nova.compute.manager [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Terminating instance [ 874.480513] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081633} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.480834] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.481657] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3f3a22-2a02-47e4-8022-9d2594ad14c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.506146] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 1d9218db-05d8-4e33-837f-e9865946237f/1d9218db-05d8-4e33-837f-e9865946237f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.506432] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2030b345-4d17-452b-8b12-93e4d4e9d35a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.521176] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "refresh_cache-d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.521461] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Instance network_info: |[{"id": "1217e13a-a21e-45bc-96a7-abdecc27de51", "address": "fa:16:3e:32:59:aa", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1217e13a-a2", "ovs_interfaceid": "1217e13a-a21e-45bc-96a7-abdecc27de51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 874.521738] env[70020]: DEBUG oslo_concurrency.lockutils [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] Acquired lock "refresh_cache-d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.521912] env[70020]: DEBUG nova.network.neutron [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Refreshing network info cache for port 1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.523120] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:59:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f847601f-7479-48eb-842f-41f94eea8537', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1217e13a-a21e-45bc-96a7-abdecc27de51', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.531118] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 874.531613] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.532414] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb4c15cb-5728-4d8c-a95e-7c3ad261c25f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.548085] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 874.548085] env[70020]: value = "task-3618367" [ 874.548085] env[70020]: _type = "Task" [ 874.548085] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.554424] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.554424] env[70020]: value = "task-3618368" [ 874.554424] env[70020]: _type = "Task" [ 874.554424] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.561122] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.570405] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618368, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.574770] env[70020]: INFO nova.compute.manager [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Took 46.28 seconds to build instance. [ 874.643343] env[70020]: DEBUG nova.compute.manager [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.643610] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.644697] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadc9ecb-c462-4b20-828b-2e7fdf265c85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.653689] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.654079] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccaca373-7b12-4609-a46d-51da269fff8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.662574] env[70020]: DEBUG oslo_vmware.api [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 874.662574] env[70020]: value = "task-3618369" [ 874.662574] env[70020]: _type = "Task" [ 874.662574] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.672239] env[70020]: DEBUG oslo_vmware.api [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618369, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.762038] env[70020]: DEBUG nova.scheduler.client.report [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.915681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "refresh_cache-24184767-92f7-48b3-bbad-16a596ececde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.915852] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquired lock "refresh_cache-24184767-92f7-48b3-bbad-16a596ececde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.916460] env[70020]: DEBUG nova.network.neutron [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.062294] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618367, 'name': ReconfigVM_Task, 'duration_secs': 0.338385} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.063128] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 1d9218db-05d8-4e33-837f-e9865946237f/1d9218db-05d8-4e33-837f-e9865946237f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.066059] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4419af76-d2da-40a8-a349-13fce34f47be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.072954] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618368, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.077089] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e48523d2-22e0-40c3-9b7a-3637dfd74f7e tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "4335f92a-897a-4779-be70-4f0754a66d53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.046s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.080529] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 875.080529] env[70020]: value = "task-3618370" [ 875.080529] env[70020]: _type = "Task" [ 875.080529] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.096461] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618370, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.177032] env[70020]: DEBUG oslo_vmware.api [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618369, 'name': PowerOffVM_Task, 'duration_secs': 0.23021} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.177032] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.177032] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.177032] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08dbcf3a-4cc1-438e-a2bb-b54999a38788 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.258122] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.258395] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.258583] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Deleting the datastore file [datastore2] 16c45b86-317a-4d0c-a402-51c85af37a5b {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.258873] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e28db37c-c10e-4c35-9eb3-5872d8991851 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.266143] env[70020]: DEBUG oslo_vmware.api [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for the task: (returnval){ [ 875.266143] env[70020]: value = "task-3618372" [ 875.266143] env[70020]: _type = "Task" [ 875.266143] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.266962] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.267456] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 875.274381] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.796s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.275949] env[70020]: INFO nova.compute.claims [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.279706] env[70020]: DEBUG nova.network.neutron [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Updated VIF entry in instance network info cache for port 1217e13a-a21e-45bc-96a7-abdecc27de51. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.280034] env[70020]: DEBUG nova.network.neutron [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Updating instance_info_cache with network_info: [{"id": "1217e13a-a21e-45bc-96a7-abdecc27de51", "address": "fa:16:3e:32:59:aa", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1217e13a-a2", "ovs_interfaceid": "1217e13a-a21e-45bc-96a7-abdecc27de51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.287220] env[70020]: DEBUG oslo_vmware.api [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618372, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.438451] env[70020]: DEBUG nova.network.neutron [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.491939] env[70020]: DEBUG nova.network.neutron [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.565336] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618368, 'name': CreateVM_Task, 'duration_secs': 0.579873} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.565510] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.566206] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.566375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.566761] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 875.567043] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e8dcc3d-37a9-4ccc-839d-a40f39c32c0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.572159] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 875.572159] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d3b94c-9448-5da6-d5bb-958cb252d3d3" [ 875.572159] env[70020]: _type = "Task" [ 875.572159] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.580909] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d3b94c-9448-5da6-d5bb-958cb252d3d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.580909] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.592275] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618370, 'name': Rename_Task, 'duration_secs': 0.15982} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.592505] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.592886] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abe08af9-b88d-40db-b36f-4eb36df498af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.602211] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 875.602211] env[70020]: value = "task-3618373" [ 875.602211] env[70020]: _type = "Task" [ 875.602211] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.613038] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.777796] env[70020]: DEBUG oslo_vmware.api [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Task: {'id': task-3618372, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.444062} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.778262] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.778583] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.778911] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.780189] env[70020]: INFO nova.compute.manager [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 875.780189] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.780761] env[70020]: DEBUG nova.compute.utils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 875.784357] env[70020]: DEBUG nova.compute.manager [-] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.784357] env[70020]: DEBUG nova.network.neutron [-] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.786575] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 875.786575] env[70020]: DEBUG nova.network.neutron [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 875.789152] env[70020]: DEBUG oslo_concurrency.lockutils [req-da63d171-a26e-47c4-9015-0c8f1b7effe2 req-9662dbf5-2cd6-4bcd-9ba9-ec2b9d4c8f3e service nova] Releasing lock "refresh_cache-d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.859697] env[70020]: DEBUG nova.policy [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05c6f4ba05704430899274c5da6f3140', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '32f7008f815f482f992ddbc4906664b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 875.995976] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Releasing lock "refresh_cache-24184767-92f7-48b3-bbad-16a596ececde" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.996471] env[70020]: DEBUG nova.compute.manager [None req-460a3a54-9746-4733-a697-2f6b1e3e430d 
tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 875.996656] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.997614] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93a4fbe-eeef-4d12-a37c-1a5aaf3f8839 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.010590] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.010590] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cc55be9-9007-4847-85f5-82894ef45b63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.020759] env[70020]: DEBUG oslo_vmware.api [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 876.020759] env[70020]: value = "task-3618374" [ 876.020759] env[70020]: _type = "Task" [ 876.020759] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.037034] env[70020]: DEBUG oslo_vmware.api [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.088179] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d3b94c-9448-5da6-d5bb-958cb252d3d3, 'name': SearchDatastore_Task, 'duration_secs': 0.01477} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.088179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.088179] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.088179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.088179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.088179] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.088179] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae53be58-7129-4537-9296-b94d0ca46442 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.103176] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.103602] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.108704] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e45a55f-c6be-47e1-ad22-e35996ee3510 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.112297] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.118055] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 876.118055] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520df3d0-e6b2-5c37-b90e-074c6e85fbe6" [ 876.118055] env[70020]: _type = "Task" [ 876.118055] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.121035] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618373, 'name': PowerOnVM_Task} progress is 76%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.131438] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520df3d0-e6b2-5c37-b90e-074c6e85fbe6, 'name': SearchDatastore_Task, 'duration_secs': 0.011452} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.132564] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1f53aef-8f3f-49e6-98c7-0ebd764ed00b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.139697] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 876.139697] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ffe23d-438b-7c51-529e-988e788ae619" [ 876.139697] env[70020]: _type = "Task" [ 876.139697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.155042] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ffe23d-438b-7c51-529e-988e788ae619, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.235506] env[70020]: DEBUG nova.compute.manager [req-c09c8b5a-7e42-4ef5-81f3-3a921c9a7143 req-7fe9cc1b-9a9d-4959-a0cc-9e03d8416fcf service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Received event network-vif-deleted-acad913d-4d60-4211-8b2f-e30f6f1d525c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 876.235506] env[70020]: INFO nova.compute.manager [req-c09c8b5a-7e42-4ef5-81f3-3a921c9a7143 req-7fe9cc1b-9a9d-4959-a0cc-9e03d8416fcf service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Neutron deleted interface acad913d-4d60-4211-8b2f-e30f6f1d525c; detaching it from the instance and deleting it from the info cache [ 876.235506] env[70020]: DEBUG nova.network.neutron [req-c09c8b5a-7e42-4ef5-81f3-3a921c9a7143 req-7fe9cc1b-9a9d-4959-a0cc-9e03d8416fcf service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.287194] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 876.307034] env[70020]: DEBUG nova.network.neutron [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Successfully created port: 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.529478] env[70020]: DEBUG nova.compute.manager [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.533517] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445f0464-a306-4363-9df8-a9a271d16f71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.536545] env[70020]: DEBUG oslo_vmware.api [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618374, 'name': PowerOffVM_Task, 'duration_secs': 0.256051} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.540073] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.540073] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.540482] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70404621-f9c0-4db1-a590-861de46aa689 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.583061] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.583302] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.583481] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Deleting the datastore file [datastore2] 24184767-92f7-48b3-bbad-16a596ececde {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.584158] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fd49661-b4ac-45eb-9ebd-61e276af4d7d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.596329] env[70020]: DEBUG oslo_vmware.api [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for the task: (returnval){ [ 876.596329] env[70020]: value = "task-3618376" [ 876.596329] env[70020]: _type = "Task" [ 876.596329] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.611906] env[70020]: DEBUG oslo_vmware.api [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.622026] env[70020]: DEBUG oslo_vmware.api [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618373, 'name': PowerOnVM_Task, 'duration_secs': 0.987561} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.622026] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.622026] env[70020]: INFO nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Took 9.33 seconds to spawn the instance on the hypervisor. [ 876.622026] env[70020]: DEBUG nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.622026] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7143d846-fa66-426a-a248-ee951dc03849 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.653802] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ffe23d-438b-7c51-529e-988e788ae619, 'name': SearchDatastore_Task, 'duration_secs': 0.01062} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.654153] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.655945] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d3dbc3d1-bba7-4803-bacb-02de27a6a4ff/d3dbc3d1-bba7-4803-bacb-02de27a6a4ff.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.655945] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3366454f-3344-4662-bb51-ac76b9575909 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.665657] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 876.665657] env[70020]: value = "task-3618377" [ 876.665657] env[70020]: _type = "Task" [ 876.665657] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.677224] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.696488] env[70020]: DEBUG nova.network.neutron [-] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.742022] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b582877-2e19-491b-8d7c-4eb3099219d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.752956] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af743da-58c3-4b2e-8eca-7453f3433d70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.799473] env[70020]: DEBUG nova.compute.manager [req-c09c8b5a-7e42-4ef5-81f3-3a921c9a7143 req-7fe9cc1b-9a9d-4959-a0cc-9e03d8416fcf service nova] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Detach interface failed, port_id=acad913d-4d60-4211-8b2f-e30f6f1d525c, reason: Instance 16c45b86-317a-4d0c-a402-51c85af37a5b could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 876.898316] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee05e975-8aad-4573-b608-54c83618c5d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.910301] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5426b01f-5d68-4981-a2f7-36682d009c53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.942639] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecca900f-ad2c-48fc-998e-c93a18c42b9b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.951605] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992a4602-5711-49a4-97eb-61eca7e116b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.967795] env[70020]: DEBUG nova.compute.provider_tree [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.052370] env[70020]: INFO nova.compute.manager [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] instance snapshotting [ 877.055370] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-76511de7-a6a6-4449-b43d-72e696455993 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.076262] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dae73e3-e409-431a-a4fb-35f43ad68759 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.110962] env[70020]: DEBUG oslo_vmware.api [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Task: {'id': task-3618376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129576} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.110962] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.110962] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.110962] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.110962] env[70020]: INFO nova.compute.manager [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Took 1.11 seconds to destroy the instance on the hypervisor. [ 877.110962] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.111297] env[70020]: DEBUG nova.compute.manager [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 877.111297] env[70020]: DEBUG nova.network.neutron [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.131735] env[70020]: DEBUG nova.network.neutron [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.141834] env[70020]: INFO nova.compute.manager [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Took 47.85 seconds to build instance. [ 877.178655] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618377, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.199692] env[70020]: INFO nova.compute.manager [-] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Took 1.41 seconds to deallocate network for instance. [ 877.304630] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 877.327193] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.327449] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.327604] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.327821] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.327924] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 
tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.328124] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.328345] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.328502] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.328669] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.328823] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.328994] env[70020]: DEBUG nova.virt.hardware [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.329995] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6889eda3-7c0d-4d37-b652-d483e9b10117 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.338265] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97283a9c-c13c-4851-b35f-86a32400827a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.471524] env[70020]: DEBUG nova.scheduler.client.report [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.589178] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 877.589495] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1bb18a17-b8c2-470c-98eb-a0388edc7ac6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.598115] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 877.598115] env[70020]: value = "task-3618378" [ 877.598115] env[70020]: _type = "Task" [ 877.598115] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.607038] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618378, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.639444] env[70020]: DEBUG nova.network.neutron [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.644293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ed6e9213-aae2-4234-8e5d-3d35624efdee tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 77.696s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.677123] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573545} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.677396] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d3dbc3d1-bba7-4803-bacb-02de27a6a4ff/d3dbc3d1-bba7-4803-bacb-02de27a6a4ff.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.677654] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.677972] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7dd8ee8-4d1e-4026-8068-128843ea675f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.687738] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 877.687738] env[70020]: value = "task-3618379" [ 877.687738] env[70020]: _type = "Task" [ 877.687738] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.698166] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618379, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.706497] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.803307] env[70020]: DEBUG nova.network.neutron [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Successfully updated port: 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.976814] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.977383] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 877.980624] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.664s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.980844] env[70020]: DEBUG nova.objects.instance [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lazy-loading 'resources' on Instance uuid 29d41731-4ae2-4cc4-bfda-b7356922c8ff {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.109545] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618378, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.141573] env[70020]: INFO nova.compute.manager [-] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Took 1.03 seconds to deallocate network for instance. [ 878.146514] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.200169] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086974} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.200854] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.201315] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95792a7-a1fe-4b4d-92c7-ec2339c89cc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.225630] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] d3dbc3d1-bba7-4803-bacb-02de27a6a4ff/d3dbc3d1-bba7-4803-bacb-02de27a6a4ff.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.226421] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a943e4a5-295f-420c-9096-57514b53b1b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.247685] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 878.247685] env[70020]: value = "task-3618380" [ 878.247685] env[70020]: _type = "Task" [ 878.247685] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.257645] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618380, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.275867] env[70020]: DEBUG nova.compute.manager [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Received event network-changed-d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 878.275971] env[70020]: DEBUG nova.compute.manager [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Refreshing instance network info cache due to event network-changed-d219e46c-ef15-4dec-a495-31e92d46d0c0. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 878.276139] env[70020]: DEBUG oslo_concurrency.lockutils [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] Acquiring lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.276290] env[70020]: DEBUG oslo_concurrency.lockutils [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] Acquired lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.276441] env[70020]: DEBUG nova.network.neutron [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Refreshing network info cache for port d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.308184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.308184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.308184] env[70020]: DEBUG nova.network.neutron [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.457601] env[70020]: DEBUG nova.compute.manager [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-vif-plugged-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 878.457890] env[70020]: DEBUG oslo_concurrency.lockutils [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] Acquiring lock "422ca332-5952-443c-a22e-67b1b45df5b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.458160] env[70020]: DEBUG oslo_concurrency.lockutils [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] Lock "422ca332-5952-443c-a22e-67b1b45df5b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.458337] env[70020]: DEBUG oslo_concurrency.lockutils [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad 
req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] Lock "422ca332-5952-443c-a22e-67b1b45df5b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.458557] env[70020]: DEBUG nova.compute.manager [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] No waiting events found dispatching network-vif-plugged-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 878.458734] env[70020]: WARNING nova.compute.manager [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received unexpected event network-vif-plugged-759215b2-ed99-4281-9bf0-fb9379eab835 for instance with vm_state building and task_state spawning. [ 878.458916] env[70020]: DEBUG nova.compute.manager [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 878.459114] env[70020]: DEBUG nova.compute.manager [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing instance network info cache due to event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 878.459342] env[70020]: DEBUG oslo_concurrency.lockutils [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.488967] env[70020]: DEBUG nova.compute.utils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.493607] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.493781] env[70020]: DEBUG nova.network.neutron [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.534357] env[70020]: DEBUG nova.policy [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291265cdc1164603a9011173b1457c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b060ffb3ac4ecd95dcd85d4744dc2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 878.609996] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618378, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.650298] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.667231] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.699874] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.700147] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.762349] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618380, 'name': ReconfigVM_Task, 'duration_secs': 
0.356863} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.766034] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Reconfigured VM instance instance-00000040 to attach disk [datastore1] d3dbc3d1-bba7-4803-bacb-02de27a6a4ff/d3dbc3d1-bba7-4803-bacb-02de27a6a4ff.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.766034] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e358e53-dfce-437c-ac37-49901f475168 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.774098] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 878.774098] env[70020]: value = "task-3618381" [ 878.774098] env[70020]: _type = "Task" [ 878.774098] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.785460] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618381, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.842273] env[70020]: DEBUG nova.network.neutron [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.879569] env[70020]: DEBUG nova.network.neutron [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Successfully created port: a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.994360] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.039483] env[70020]: DEBUG nova.network.neutron [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.101684] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d63d42-2316-4095-8ced-82b59bd95eba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.116715] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589a5391-0135-458d-a024-4246f42caa2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.120473] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618378, 'name': CreateSnapshot_Task, 'duration_secs': 1.173751} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.120865] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 879.122192] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffefeece-ff09-4081-a91d-9cae4da3696e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.151311] env[70020]: DEBUG oslo_concurrency.lockutils [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "1d9218db-05d8-4e33-837f-e9865946237f" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.151568] env[70020]: DEBUG oslo_concurrency.lockutils [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.151797] env[70020]: INFO nova.compute.manager [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Rebooting instance [ 879.154338] env[70020]: DEBUG nova.network.neutron [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updated VIF entry in instance network info cache for port d219e46c-ef15-4dec-a495-31e92d46d0c0. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.154742] env[70020]: DEBUG nova.network.neutron [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [{"id": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "address": "fa:16:3e:7b:73:7b", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd219e46c-ef", "ovs_interfaceid": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.157031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413c3ca2-94f0-4759-9222-60ecb804edc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.173913] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54431ca7-9346-40d1-84c2-64164aa3dbaa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.190782] env[70020]: DEBUG nova.compute.provider_tree [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.202824] env[70020]: DEBUG nova.compute.utils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 879.284696] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618381, 'name': Rename_Task, 'duration_secs': 0.314062} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.284987] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.285321] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd900612-738e-4081-a7bd-aa4a4f833d3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.293286] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 879.293286] env[70020]: value = "task-3618382" [ 879.293286] env[70020]: _type = "Task" [ 879.293286] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.301670] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.542420] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.542950] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Instance network_info: |[{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 879.543380] env[70020]: DEBUG oslo_concurrency.lockutils 
[req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.543629] env[70020]: DEBUG nova.network.neutron [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.545041] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:93:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '759215b2-ed99-4281-9bf0-fb9379eab835', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.553112] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Creating folder: Project (32f7008f815f482f992ddbc4906664b7). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.554167] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8804414f-9980-4286-8bc1-ea79d494c5cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.567933] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Created folder: Project (32f7008f815f482f992ddbc4906664b7) in parent group-v721521. [ 879.568177] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Creating folder: Instances. Parent ref: group-v721707. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.568398] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-070be545-5fb5-4c0b-970d-d175ae59c4a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.578111] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Created folder: Instances in parent group-v721707. [ 879.578346] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.578477] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.578674] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f75dc5e-d321-4b3d-9035-ca731a9b9952 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.598915] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.598915] env[70020]: value = "task-3618385" [ 879.598915] env[70020]: _type = "Task" [ 879.598915] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.607429] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618385, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.660383] env[70020]: DEBUG oslo_concurrency.lockutils [req-b0394a23-34be-4288-8fce-ec7c62309ead req-c582885d-ae85-407c-b9de-13fd4ac2a62b service nova] Releasing lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.674137] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 879.674486] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-48f36d79-76fb-405d-9955-f2dfa4f59e8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.679380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.679594] env[70020]: DEBUG oslo_concurrency.lockutils [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquired lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.679806] env[70020]: DEBUG nova.network.neutron [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.685943] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 879.685943] env[70020]: value = "task-3618386" [ 879.685943] env[70020]: 
_type = "Task" [ 879.685943] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.695675] env[70020]: DEBUG nova.scheduler.client.report [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 879.704561] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618386, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.704956] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.803952] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618382, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.005631] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.029084] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.029348] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.029506] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.029687] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.029858] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.029976] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.030204] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.030360] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.030541] env[70020]: DEBUG nova.virt.hardware [None 
req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.031461] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.031461] env[70020]: DEBUG nova.virt.hardware [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.031858] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879fc1eb-f473-407f-8d0e-9ffdf4f8be3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.041168] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad563be-6ddc-4631-a2df-159fd2e4cbb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.114056] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618385, 'name': CreateVM_Task, 'duration_secs': 0.44008} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.114355] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.114931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.115340] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.115520] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 880.115787] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e634bf5-02e0-4056-b4f4-913f4a008887 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.121208] env[70020]: DEBUG 
oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 880.121208] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528aa227-039c-a022-445d-86e4b1f8344a" [ 880.121208] env[70020]: _type = "Task" [ 880.121208] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.132020] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528aa227-039c-a022-445d-86e4b1f8344a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.196066] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618386, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.204689] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.206971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 38.305s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.232686] env[70020]: INFO nova.scheduler.client.report [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleted allocations for instance 29d41731-4ae2-4cc4-bfda-b7356922c8ff [ 880.300598] env[70020]: DEBUG nova.network.neutron [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updated VIF entry in instance network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.301309] env[70020]: DEBUG nova.network.neutron [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.311274] env[70020]: DEBUG oslo_vmware.api [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618382, 'name': PowerOnVM_Task, 'duration_secs': 0.891891} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.311924] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.312274] env[70020]: INFO nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Took 7.95 seconds to spawn the instance on the hypervisor. 
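The CreateVM_Task, SearchDatastore_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vSphere task over SOAP, then blocks in wait_for_task, which polls the task object and produces the "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern in Python, assuming an already established oslo_vmware.api.VMwareAPISession; the helper name power_on_vm and the moref value are illustrative, not taken from this log:

from oslo_vmware import vim_util

def power_on_vm(session, moref_value):
    # session: an established oslo_vmware.api.VMwareAPISession (assumption);
    # moref_value: a VirtualMachine managed-object id, e.g. 'vm-123' (placeholder).
    vm_ref = vim_util.get_moref(moref_value, 'VirtualMachine')
    # invoke_api issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task"
    # DEBUG lines); the return value is a Task managed-object reference.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task until it reaches a terminal state, emitting
    # periodic progress updates, and returns the task info or raises on failure.
    return session.wait_for_task(task)

The same wait loop sits behind every task-typed return value in this log (CreateVM_Task, CloneVM_Task, CopyVirtualDisk_Task, PowerOffVM_Task and so on).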
[ 880.312622] env[70020]: DEBUG nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.315990] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0c7a1d-dd62-4815-afcd-27404e2b223a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.495660] env[70020]: DEBUG nova.network.neutron [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [{"id": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "address": "fa:16:3e:7b:73:7b", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd219e46c-ef", "ovs_interfaceid": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.636371] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528aa227-039c-a022-445d-86e4b1f8344a, 'name': SearchDatastore_Task, 'duration_secs': 0.011989} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.636640] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.636872] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.637131] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.637327] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.637449] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.637712] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0cf9bf5-9a1d-4efd-95d6-f41bcb3065ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.651922] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.652148] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.652923] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9021910-cc6d-4545-9620-04e6a747d9d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.658995] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 880.658995] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c74c22-3720-ebff-dae4-54105d461793" [ 880.658995] env[70020]: _type = "Task" [ 880.658995] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.667799] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c74c22-3720-ebff-dae4-54105d461793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.701597] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618386, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.713628] env[70020]: INFO nova.compute.claims [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.745208] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84b47043-8591-4bb1-a621-67ff3b2d6710 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "29d41731-4ae2-4cc4-bfda-b7356922c8ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.474s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.758583] env[70020]: DEBUG nova.network.neutron [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Successfully updated port: a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.811227] env[70020]: DEBUG oslo_concurrency.lockutils [req-b7f069e5-48dc-4f47-87ce-c58c1d2fbdad req-dd300576-9cda-4c2e-837b-af9a8ffc7fac service nova] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.827402] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe" by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.827402] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.827402] env[70020]: INFO nova.compute.manager [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Attaching volume d582b3d1-9fab-425f-83f6-c90095c5e316 to /dev/sdb [ 880.845078] env[70020]: INFO nova.compute.manager [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Took 48.43 seconds to build instance. [ 880.851171] env[70020]: DEBUG nova.compute.manager [req-6fdcb553-1bb3-4cfd-9f36-40b40f333fa4 req-fcb7e70c-b5b7-4426-b5e1-80f66f0c30df service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Received event network-vif-plugged-a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.851171] env[70020]: DEBUG oslo_concurrency.lockutils [req-6fdcb553-1bb3-4cfd-9f36-40b40f333fa4 req-fcb7e70c-b5b7-4426-b5e1-80f66f0c30df service nova] Acquiring lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.851171] env[70020]: DEBUG oslo_concurrency.lockutils [req-6fdcb553-1bb3-4cfd-9f36-40b40f333fa4 req-fcb7e70c-b5b7-4426-b5e1-80f66f0c30df service nova] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.851171] env[70020]: DEBUG oslo_concurrency.lockutils [req-6fdcb553-1bb3-4cfd-9f36-40b40f333fa4 req-fcb7e70c-b5b7-4426-b5e1-80f66f0c30df service nova] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.851171] env[70020]: DEBUG nova.compute.manager [req-6fdcb553-1bb3-4cfd-9f36-40b40f333fa4 req-fcb7e70c-b5b7-4426-b5e1-80f66f0c30df service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] No waiting events found dispatching network-vif-plugged-a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.851171] env[70020]: WARNING nova.compute.manager [req-6fdcb553-1bb3-4cfd-9f36-40b40f333fa4 req-fcb7e70c-b5b7-4426-b5e1-80f66f0c30df service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Received unexpected event network-vif-plugged-a59ccbd4-85b3-4a98-8407-29d65fea21f5 for instance with vm_state building and task_state spawning. 
[ 880.888494] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97c7cd6-de22-4765-bdb1-f69203039141 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.896913] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a8c4e8-f039-4831-9163-b1e09af6c749 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.912721] env[70020]: DEBUG nova.virt.block_device [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updating existing volume attachment record: 5d51e02c-c5dc-4086-b864-fedc58a934d9 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 881.002198] env[70020]: DEBUG oslo_concurrency.lockutils [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Releasing lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.175011] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c74c22-3720-ebff-dae4-54105d461793, 'name': SearchDatastore_Task, 'duration_secs': 0.017622} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.175867] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96558b06-6140-41e9-b51f-22774d7033fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.185872] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 881.185872] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5256b463-cee5-b017-adbf-239b0193667d" [ 881.185872] env[70020]: _type = "Task" [ 881.185872] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.203987] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5256b463-cee5-b017-adbf-239b0193667d, 'name': SearchDatastore_Task, 'duration_secs': 0.012712} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.204288] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618386, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.204548] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.204840] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/422ca332-5952-443c-a22e-67b1b45df5b9.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 881.205119] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5481d14-3eeb-4e80-8715-fcc37c1d3012 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.213538] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 881.213538] env[70020]: value = "task-3618390" [ 881.213538] env[70020]: _type = "Task" [ 881.213538] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.225035] env[70020]: INFO nova.compute.resource_tracker [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating resource usage from migration b5628a9f-1bd9-44da-91e8-035e91b65f82 [ 881.227778] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618390, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.261249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.261426] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.261660] env[70020]: DEBUG nova.network.neutron [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.347548] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f5cae10-eeb3-457f-8501-2f84dc54b1fa tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.952s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.509736] env[70020]: DEBUG nova.compute.manager [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.510109] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb939c2-907c-410b-a60b-9c3208171cda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.674104] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.674601] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.674750] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.675496] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.675682] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.678080] env[70020]: INFO nova.compute.manager [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Terminating instance [ 881.707488] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618386, 'name': CloneVM_Task, 'duration_secs': 1.768789} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.707885] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Created linked-clone VM from snapshot [ 881.710403] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203b011b-1609-48d3-a491-66ea1f610e56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.727197] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Uploading image 3e51d9c3-d3e3-4ae3-8ecf-3a0726817d70 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 881.741055] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618390, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.756784] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 881.756784] env[70020]: value = "vm-721710" [ 881.756784] env[70020]: _type = "VirtualMachine" [ 881.756784] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 881.757088] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ac77091c-3f08-496a-b7ad-69215af9b152 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.767108] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lease: (returnval){ [ 881.767108] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52894518-5267-a5e7-ffd4-7d666f9c7fdf" [ 881.767108] env[70020]: _type = "HttpNfcLease" [ 881.767108] env[70020]: } obtained for exporting VM: (result){ [ 881.767108] env[70020]: value = "vm-721710" [ 881.767108] env[70020]: _type = "VirtualMachine" [ 881.767108] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 881.767510] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the lease: (returnval){ [ 881.767510] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52894518-5267-a5e7-ffd4-7d666f9c7fdf" [ 881.767510] env[70020]: _type = "HttpNfcLease" [ 881.767510] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 881.778571] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 881.778571] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52894518-5267-a5e7-ffd4-7d666f9c7fdf" [ 881.778571] env[70020]: _type = "HttpNfcLease" [ 881.778571] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 881.778924] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 881.778924] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52894518-5267-a5e7-ffd4-7d666f9c7fdf" [ 881.778924] env[70020]: _type = "HttpNfcLease" [ 881.778924] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 881.779843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0ba871-c767-45c2-b043-22f492d138c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.792316] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52672958-72d5-56b9-9f03-944bd6265763/disk-0.vmdk from lease info. 
{{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 881.792413] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52672958-72d5-56b9-9f03-944bd6265763/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 881.876771] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 881.881276] env[70020]: DEBUG nova.network.neutron [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.962915] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-43775941-d783-4108-9b53-fd16da7e9143 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.991236] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a2ef55-3f19-4230-9636-34b7c6beaaa7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.014021] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "2ccd34c8-b433-41be-b800-d06a0595bff9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.014021] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.014021] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f0827f-cdb5-4035-aeb8-7f812af880fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.047968] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee66a48-a518-421e-bcd9-929bde07a1d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.059523] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c961574-8e8a-4226-9ddd-2bceb9a08670 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.074545] env[70020]: DEBUG nova.compute.provider_tree [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.089709] env[70020]: DEBUG nova.network.neutron [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updating instance_info_cache with network_info: [{"id": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "address": "fa:16:3e:ae:ed:1c", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa59ccbd4-85", "ovs_interfaceid": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.186016] env[70020]: DEBUG nova.compute.manager [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 882.186254] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 882.187146] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1a4dc7-c7fe-412e-bcf6-5b206a56d95f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.196437] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 882.196786] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d536cd0-1f93-44da-b705-fbfb1741466c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.204214] env[70020]: DEBUG oslo_vmware.api [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 882.204214] env[70020]: value = "task-3618392" [ 882.204214] env[70020]: _type = "Task" [ 882.204214] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.212958] env[70020]: DEBUG oslo_vmware.api [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.226681] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53102} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.226927] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/422ca332-5952-443c-a22e-67b1b45df5b9.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 882.227163] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 882.227414] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eccd57ad-1ce7-4037-bdc6-fd934f6dda88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.234504] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 882.234504] env[70020]: value = "task-3618393" [ 882.234504] env[70020]: _type = "Task" [ 882.234504] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.243315] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618393, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.412666] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.554032] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36317d5e-feb5-4e85-8aab-ddf51af3f92f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.561478] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Doing hard reboot of VM {{(pid=70020) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 882.561947] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-dbbf1fdc-355d-48bc-8ab8-ad4788c8c4bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.569612] env[70020]: DEBUG oslo_vmware.api [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 882.569612] env[70020]: value = "task-3618394" [ 882.569612] env[70020]: _type = "Task" [ 882.569612] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.580848] env[70020]: DEBUG nova.scheduler.client.report [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.584922] env[70020]: DEBUG oslo_vmware.api [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618394, 'name': ResetVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.593020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.593020] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Instance network_info: |[{"id": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "address": "fa:16:3e:ae:ed:1c", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa59ccbd4-85", "ovs_interfaceid": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 882.593020] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:ed:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a59ccbd4-85b3-4a98-8407-29d65fea21f5', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.601880] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating folder: Project (74b060ffb3ac4ecd95dcd85d4744dc2a). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.603396] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-772800f5-c5b5-40ea-8c42-74fdf5824a61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.620882] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created folder: Project (74b060ffb3ac4ecd95dcd85d4744dc2a) in parent group-v721521. [ 882.621101] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating folder: Instances. Parent ref: group-v721713. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.621358] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9fbe7f6-be09-456d-8211-1a1e8178c298 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.635277] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created folder: Instances in parent group-v721713. [ 882.635766] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.636242] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.636631] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-730a70ce-4fa8-4293-be28-866c6d29a0d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.659383] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.659383] env[70020]: value = "task-3618397" [ 882.659383] env[70020]: _type = "Task" [ 882.659383] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.669739] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618397, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.716760] env[70020]: DEBUG oslo_vmware.api [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618392, 'name': PowerOffVM_Task, 'duration_secs': 0.346259} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.719020] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 882.719020] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 882.719020] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f99f0f86-db9e-4645-8ccc-16bf48ff1b57 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.746994] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.137708} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.747530] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.748546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a6cf59-7729-49ba-bab6-5714ca643cfb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.773464] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/422ca332-5952-443c-a22e-67b1b45df5b9.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.774048] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58c4748e-8467-4b03-b7d4-dbfb693144c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.796488] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 882.796488] env[70020]: value = "task-3618399" [ 882.796488] env[70020]: _type = "Task" [ 882.796488] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.811649] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618399, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.813750] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 882.814129] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 882.814406] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleting the datastore file [datastore1] d3dbc3d1-bba7-4803-bacb-02de27a6a4ff {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.814791] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a44742f3-21c9-4daf-a7ff-f58f0f5eb051 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.825474] env[70020]: DEBUG oslo_vmware.api [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 882.825474] env[70020]: value = "task-3618400" [ 882.825474] env[70020]: _type = "Task" [ 882.825474] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.839145] env[70020]: DEBUG oslo_vmware.api [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.892499] env[70020]: DEBUG nova.compute.manager [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Received event network-changed-a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.893127] env[70020]: DEBUG nova.compute.manager [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Refreshing instance network info cache due to event network-changed-a59ccbd4-85b3-4a98-8407-29d65fea21f5. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 882.896546] env[70020]: DEBUG oslo_concurrency.lockutils [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] Acquiring lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.896546] env[70020]: DEBUG oslo_concurrency.lockutils [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] Acquired lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.896546] env[70020]: DEBUG nova.network.neutron [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Refreshing network info cache for port a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.083810] env[70020]: DEBUG oslo_vmware.api [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618394, 'name': ResetVM_Task, 'duration_secs': 0.123585} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.083810] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Did hard reboot of VM {{(pid=70020) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 883.085153] env[70020]: DEBUG nova.compute.manager [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.086567] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6cbd4e-0270-4286-b75f-fcc5ee9a3cbc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.092459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.885s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.092907] env[70020]: INFO nova.compute.manager [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Migrating [ 883.104723] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.319s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.106047] env[70020]: DEBUG nova.objects.instance [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lazy-loading 'resources' on Instance uuid bb4e4986-af2a-4832-9ec7-777bca863dce {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.171207] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618397, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.309163] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618399, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.336739] env[70020]: DEBUG oslo_vmware.api [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328175} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.337350] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.337862] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.338193] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.338570] env[70020]: INFO nova.compute.manager [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Took 1.15 seconds to destroy the instance on the hypervisor. [ 883.338987] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.339417] env[70020]: DEBUG nova.compute.manager [-] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 883.339646] env[70020]: DEBUG nova.network.neutron [-] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 883.637698] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.637861] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.638045] env[70020]: DEBUG nova.network.neutron [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.646402] env[70020]: DEBUG oslo_concurrency.lockutils [None req-765d3891-ee10-4ef7-8ba2-c3189fc880e4 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.495s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.671967] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618397, 'name': CreateVM_Task, 'duration_secs': 0.520613} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.672172] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.677017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.677317] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.677667] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 883.677953] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-421abf76-2eeb-44fc-bc1c-d222eef1853e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.685644] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 883.685644] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526912f0-0a93-4278-f4d0-495eaa8e9957" [ 883.685644] env[70020]: _type = "Task" [ 883.685644] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.704248] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526912f0-0a93-4278-f4d0-495eaa8e9957, 'name': SearchDatastore_Task, 'duration_secs': 0.013319} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.704248] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.704248] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.704642] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.704792] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.704982] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.708070] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fcc37cb-6e7c-4ddd-a857-f3caf801aa50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.718474] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.718787] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.722906] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a5837a-2aab-4e0d-a772-5cb43dafbac6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.729180] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 883.729180] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b217e8-7fec-fd73-31cf-ce8f2dd6727a" [ 883.729180] env[70020]: _type = "Task" [ 883.729180] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.739188] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b217e8-7fec-fd73-31cf-ce8f2dd6727a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.811951] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618399, 'name': ReconfigVM_Task, 'duration_secs': 0.558027} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.812257] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/422ca332-5952-443c-a22e-67b1b45df5b9.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.812950] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-931a145f-d63f-4818-b8a6-eb11edad70fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.824567] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 883.824567] env[70020]: value = "task-3618402" [ 883.824567] env[70020]: _type = "Task" [ 883.824567] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.842347] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618402, 'name': Rename_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.931393] env[70020]: DEBUG nova.network.neutron [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updated VIF entry in instance network info cache for port a59ccbd4-85b3-4a98-8407-29d65fea21f5. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.931785] env[70020]: DEBUG nova.network.neutron [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updating instance_info_cache with network_info: [{"id": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "address": "fa:16:3e:ae:ed:1c", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa59ccbd4-85", "ovs_interfaceid": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.140482] env[70020]: DEBUG nova.network.neutron [-] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.226406] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec47589-8739-4874-8c91-6a3a910832a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.239464] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b217e8-7fec-fd73-31cf-ce8f2dd6727a, 'name': SearchDatastore_Task, 'duration_secs': 0.020683} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.242145] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e810f62-eba6-4674-af7a-747b0c2f3a2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.245630] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c622b841-b596-4e68-9559-f06c1b994117 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.253798] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 884.253798] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52882af0-4ae9-cf80-bd02-0108e2adb23a" [ 884.253798] env[70020]: _type = "Task" [ 884.253798] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.285402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1860f17b-8cdc-41b0-a564-566c28522477 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.299724] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3beaf40-234c-41a3-a28f-da37598660ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.109752] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52882af0-4ae9-cf80-bd02-0108e2adb23a, 'name': SearchDatastore_Task, 'duration_secs': 0.023791} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.114028] env[70020]: DEBUG oslo_concurrency.lockutils [req-22c5ecde-164d-4cfe-b9a1-91d4a162f2be req-e31dee04-cc89-4fdc-b923-1205e8ff05ec service nova] Releasing lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.115238] env[70020]: INFO nova.compute.manager [-] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Took 1.78 seconds to deallocate network for instance. 
[ 885.115238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.115710] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 96966bf2-a9ff-48ba-be3f-c767e7b6eedd/96966bf2-a9ff-48ba-be3f-c767e7b6eedd.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.117117] env[70020]: DEBUG nova.compute.manager [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Received event network-changed-d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.117440] env[70020]: DEBUG nova.compute.manager [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Refreshing instance network info cache due to event network-changed-d219e46c-ef15-4dec-a495-31e92d46d0c0. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 885.117805] env[70020]: DEBUG oslo_concurrency.lockutils [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] Acquiring lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.118095] env[70020]: DEBUG oslo_concurrency.lockutils [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] Acquired lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.118386] env[70020]: DEBUG nova.network.neutron [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Refreshing network info cache for port d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.133035] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b46f0fc0-9327-4d07-90c6-609e085419f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.145450] env[70020]: DEBUG nova.compute.provider_tree [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.153283] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 
tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618402, 'name': Rename_Task, 'duration_secs': 0.293217} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.155397] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.155397] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 885.155397] env[70020]: value = "task-3618403" [ 885.155397] env[70020]: _type = "Task" [ 885.155397] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.155397] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29bb78f4-ec4c-476b-8f2a-74a8823abdc2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.168366] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 885.168366] env[70020]: value = "task-3618404" [ 885.168366] env[70020]: _type = "Task" [ 885.168366] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.179138] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618404, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.216709] env[70020]: DEBUG nova.compute.manager [req-2e5f4f30-1141-4b2c-b518-a1d8b73cb914 req-5660f4c4-df94-4cc3-ba75-f3e737235f2e service nova] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Received event network-vif-deleted-1217e13a-a21e-45bc-96a7-abdecc27de51 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.360161] env[70020]: DEBUG nova.network.neutron [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.468513] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 885.468817] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721712', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'name': 'volume-d582b3d1-9fab-425f-83f6-c90095c5e316', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5c216231-afc5-41df-a243-bb2a17c20bfe', 'attached_at': '', 'detached_at': '', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'serial': 'd582b3d1-9fab-425f-83f6-c90095c5e316'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 885.469727] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73892ca4-ea4e-4f4c-a128-e3b9a54389fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.488180] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9632821a-ce78-4ab5-8a99-8bc7bf6b3e41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.519917] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] volume-d582b3d1-9fab-425f-83f6-c90095c5e316/volume-d582b3d1-9fab-425f-83f6-c90095c5e316.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.520313] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-953f7720-447c-476a-abb7-7cf5c1039e24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.539944] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 885.539944] env[70020]: value = "task-3618405" [ 885.539944] env[70020]: _type = "Task" [ 885.539944] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.549406] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618405, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.630327] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.649145] env[70020]: DEBUG nova.scheduler.client.report [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.668989] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618403, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.682212] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618404, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.720676] env[70020]: INFO nova.compute.manager [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Rebuilding instance [ 885.786304] env[70020]: DEBUG nova.compute.manager [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 885.787433] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c7a8bb-996d-439b-89b8-a2ea36124afc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.861776] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.017334] env[70020]: DEBUG nova.network.neutron [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updated VIF entry in instance network info cache for port d219e46c-ef15-4dec-a495-31e92d46d0c0. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.017742] env[70020]: DEBUG nova.network.neutron [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [{"id": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "address": "fa:16:3e:7b:73:7b", "network": {"id": "7e18bd4a-4d0c-4e7f-a52d-bbfb68dfe3f1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-642722899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2a0a96b236e4a7c8f6878d0becfc66b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd219e46c-ef", "ovs_interfaceid": "d219e46c-ef15-4dec-a495-31e92d46d0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.051020] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618405, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.135715] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "1d9218db-05d8-4e33-837f-e9865946237f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.136073] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.136244] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "1d9218db-05d8-4e33-837f-e9865946237f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.136409] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.136581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.138682] env[70020]: INFO nova.compute.manager [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Terminating instance [ 886.154271] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.051s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.156737] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.286s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.159230] env[70020]: 
INFO nova.compute.claims [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.172766] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608003} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.177324] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 96966bf2-a9ff-48ba-be3f-c767e7b6eedd/96966bf2-a9ff-48ba-be3f-c767e7b6eedd.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.177324] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.177324] env[70020]: INFO nova.scheduler.client.report [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Deleted allocations for instance bb4e4986-af2a-4832-9ec7-777bca863dce [ 886.178141] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45f46b86-c7c5-42ba-8bea-ec4a784479ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.189965] env[70020]: DEBUG oslo_vmware.api [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618404, 'name': PowerOnVM_Task, 'duration_secs': 0.756162} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.194648] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.194648] env[70020]: INFO nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Took 8.89 seconds to spawn the instance on the hypervisor. 
[ 886.194648] env[70020]: DEBUG nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.194648] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 886.194648] env[70020]: value = "task-3618406" [ 886.194648] env[70020]: _type = "Task" [ 886.194648] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.194648] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f68b42d-de07-4d9e-b096-0730c971e639 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.209068] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618406, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.520501] env[70020]: DEBUG oslo_concurrency.lockutils [req-a1f9cddf-71bc-493e-ba15-e8e61ff63a6b req-e3f488eb-3283-4381-88a2-3913b231f33e service nova] Releasing lock "refresh_cache-1d9218db-05d8-4e33-837f-e9865946237f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.551571] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618405, 'name': ReconfigVM_Task, 'duration_secs': 0.516992} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.551863] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Reconfigured VM instance instance-00000037 to attach disk [datastore2] volume-d582b3d1-9fab-425f-83f6-c90095c5e316/volume-d582b3d1-9fab-425f-83f6-c90095c5e316.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.556510] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-201a99ef-7665-48f2-adf4-0cd7d9306df8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.574183] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 886.574183] env[70020]: value = "task-3618407" [ 886.574183] env[70020]: _type = "Task" [ 886.574183] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.583234] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.643264] env[70020]: DEBUG nova.compute.manager [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 886.643510] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 886.644540] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b532d3d-3953-4872-bb49-a388c4b960da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.653602] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.653883] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecec5824-6fc0-4b14-87e9-65c6199a8b5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.661856] env[70020]: DEBUG oslo_vmware.api [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 886.661856] env[70020]: value = "task-3618408" [ 886.661856] env[70020]: _type = "Task" [ 886.661856] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.678355] env[70020]: DEBUG oslo_vmware.api [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618408, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.688290] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b3e825c-71b1-4944-b679-2645c48ce2c0 tempest-VolumesAssistedSnapshotsTest-996933272 tempest-VolumesAssistedSnapshotsTest-996933272-project-member] Lock "bb4e4986-af2a-4832-9ec7-777bca863dce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.711s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.708827] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618406, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092875} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.709249] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.710298] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2876e0-4674-4920-9383-979d788f1a15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.723077] env[70020]: INFO nova.compute.manager [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Took 47.06 seconds to build instance. 
[ 886.745069] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 96966bf2-a9ff-48ba-be3f-c767e7b6eedd/96966bf2-a9ff-48ba-be3f-c767e7b6eedd.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.745278] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa3740c7-033a-4450-8571-33bab39fd6d7 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "422ca332-5952-443c-a22e-67b1b45df5b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.571s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.745724] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af983119-3ba6-4de5-887b-9b4a36c035a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.768856] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 886.768856] env[70020]: value = "task-3618409" [ 886.768856] env[70020]: _type = "Task" [ 886.768856] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.779169] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.805042] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.805661] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baeb0274-ab83-40a6-957b-d3f3e78ba3d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.815065] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 886.815065] env[70020]: value = "task-3618410" [ 886.815065] env[70020]: _type = "Task" [ 886.815065] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.827162] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618410, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.964083] env[70020]: INFO nova.compute.manager [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Rescuing [ 886.964428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.964428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.964554] env[70020]: DEBUG nova.network.neutron [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.085633] env[70020]: DEBUG oslo_vmware.api [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618407, 'name': ReconfigVM_Task, 'duration_secs': 0.192291} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.085951] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721712', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'name': 'volume-d582b3d1-9fab-425f-83f6-c90095c5e316', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5c216231-afc5-41df-a243-bb2a17c20bfe', 'attached_at': '', 'detached_at': '', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'serial': 'd582b3d1-9fab-425f-83f6-c90095c5e316'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 887.176244] env[70020]: DEBUG oslo_vmware.api [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618408, 'name': PowerOffVM_Task, 'duration_secs': 0.301173} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.176911] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.177672] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 887.177672] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb7d4fc4-a4f8-4a4a-aebf-7dfa7b1d95de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.257296] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 887.257950] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 887.258156] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Deleting the datastore file [datastore2] 1d9218db-05d8-4e33-837f-e9865946237f {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.258581] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3e51a24-7e48-416b-b2da-44849975ad62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.264390] env[70020]: DEBUG nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 887.274574] env[70020]: DEBUG oslo_vmware.api [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 887.274574] env[70020]: value = "task-3618412" [ 887.274574] env[70020]: _type = "Task" [ 887.274574] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.282104] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618409, 'name': ReconfigVM_Task, 'duration_secs': 0.373133} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.282841] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 96966bf2-a9ff-48ba-be3f-c767e7b6eedd/96966bf2-a9ff-48ba-be3f-c767e7b6eedd.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.283512] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9721a3c9-417b-4b06-8241-5fe9f5e663dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.288252] env[70020]: DEBUG oslo_vmware.api [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.298531] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 887.298531] env[70020]: value = "task-3618413" [ 887.298531] env[70020]: _type = "Task" [ 887.298531] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.310283] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618413, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.325852] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618410, 'name': PowerOffVM_Task, 'duration_secs': 0.215903} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.326161] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.327307] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.327584] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-860a30b1-31b6-4a24-91b0-c13c4cf3102d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.336850] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 887.336850] env[70020]: value = "task-3618414" [ 887.336850] env[70020]: _type = "Task" [ 887.336850] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.346563] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618414, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.378701] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa0bb60-ac26-49c0-94ed-bae5c1a26669 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.407672] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 887.783303] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.789641] env[70020]: DEBUG oslo_vmware.api [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189996} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.789957] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 887.790227] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 887.790663] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 887.790891] env[70020]: INFO nova.compute.manager [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 887.791187] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 887.791400] env[70020]: DEBUG nova.compute.manager [-] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 887.791494] env[70020]: DEBUG nova.network.neutron [-] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 887.808950] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618413, 'name': Rename_Task, 'duration_secs': 0.177059} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.810050] env[70020]: DEBUG nova.network.neutron [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.813979] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.816794] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c4f39ab-f3b1-4ccf-893a-49261bd1fbf8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.825739] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 887.825739] env[70020]: value = "task-3618415" [ 887.825739] env[70020]: _type = "Task" [ 887.825739] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.837305] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618415, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.848952] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 887.848952] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 887.848952] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721598', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'name': 'volume-b709f316-53b7-4e6a-a871-7ecc3270770e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6f2bc97b-0f0a-4f16-b41c-7af96130783f', 'attached_at': '', 'detached_at': '', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'serial': 'b709f316-53b7-4e6a-a871-7ecc3270770e'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 887.849713] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27b8f60-b42d-49e5-8f1a-b44219268ca0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.854238] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798cd54c-e1be-4978-a427-db5c45eac055 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.881092] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf784e6-4f16-4b56-b5a3-85fb0023f7ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.886048] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9c40bc-2c23-4af7-bdb7-edc0a522c3f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.920737] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3fa6be-36a3-450f-bb46-1877812ae2b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.925480] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.926173] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-8eb3b357-68cc-44cf-9124-3fb75cc22b55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.928362] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d581400-6229-456c-9bee-8e2b83ad4df6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.956419] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa988442-b13f-444c-afcc-b69f6179bf1b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.961112] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 887.961112] env[70020]: value = "task-3618416" [ 887.961112] env[70020]: _type = "Task" [ 887.961112] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.961940] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984a8bfa-c2a5-45d1-893e-2843f5c705c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.980156] env[70020]: DEBUG nova.compute.provider_tree [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.996079] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] The volume has not been displaced from its original location: [datastore1] volume-b709f316-53b7-4e6a-a871-7ecc3270770e/volume-b709f316-53b7-4e6a-a871-7ecc3270770e.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 888.002182] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 888.006639] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1885695-be2e-460e-90c4-caeb140849ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.020064] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618416, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.030191] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 888.030191] env[70020]: value = "task-3618417" [ 888.030191] env[70020]: _type = "Task" [ 888.030191] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.040300] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.138091] env[70020]: DEBUG nova.compute.manager [req-1aa0b86e-96e4-4faa-b454-bab96b0d4de8 req-d733fc30-e441-48c1-be1b-4bcf99deda57 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Received event network-vif-deleted-d219e46c-ef15-4dec-a495-31e92d46d0c0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 888.138481] env[70020]: INFO nova.compute.manager [req-1aa0b86e-96e4-4faa-b454-bab96b0d4de8 req-d733fc30-e441-48c1-be1b-4bcf99deda57 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Neutron deleted interface d219e46c-ef15-4dec-a495-31e92d46d0c0; detaching it from the instance and deleting it from the info cache [ 888.138767] env[70020]: DEBUG nova.network.neutron [req-1aa0b86e-96e4-4faa-b454-bab96b0d4de8 req-d733fc30-e441-48c1-be1b-4bcf99deda57 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.144246] env[70020]: DEBUG nova.objects.instance [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'flavor' on Instance uuid 5c216231-afc5-41df-a243-bb2a17c20bfe {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.315370] env[70020]: DEBUG oslo_concurrency.lockutils [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.339970] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618415, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.476674] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618416, 'name': PowerOffVM_Task, 'duration_secs': 0.249912} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.476975] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.477202] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 888.483758] env[70020]: DEBUG nova.scheduler.client.report [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.541365] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618417, 'name': ReconfigVM_Task, 'duration_secs': 0.304946} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.541779] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 888.549168] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-006fd3ab-8947-492f-9cea-87b73563257f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.569417] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 888.569417] env[70020]: value = "task-3618418" [ 888.569417] env[70020]: _type = "Task" [ 888.569417] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.580040] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618418, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.580402] env[70020]: DEBUG nova.network.neutron [-] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.644647] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-526ecae1-b95e-416c-b8de-4d975c12628a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.650839] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2059faca-6661-400a-9505-6b4a809ce82f tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.824s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.659015] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b139c684-c042-40da-bd4c-405afc7c4a3e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.712542] env[70020]: DEBUG nova.compute.manager [req-1aa0b86e-96e4-4faa-b454-bab96b0d4de8 req-d733fc30-e441-48c1-be1b-4bcf99deda57 service nova] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Detach interface failed, port_id=d219e46c-ef15-4dec-a495-31e92d46d0c0, reason: Instance 1d9218db-05d8-4e33-837f-e9865946237f could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 888.839345] env[70020]: DEBUG oslo_vmware.api [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618415, 'name': PowerOnVM_Task, 'duration_secs': 0.678201} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.839345] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.839345] env[70020]: INFO nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Took 8.83 seconds to spawn the instance on the hypervisor. 
[ 888.839345] env[70020]: DEBUG nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.839984] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ffd193-b97f-4fa8-aaef-6c1c6e28e2fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.984882] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.985157] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.985315] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.985493] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.985632] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.985775] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.985979] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.986622] env[70020]: DEBUG nova.virt.hardware [None 
req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.986846] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.987033] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.987215] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.994753] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.838s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.995298] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 888.998786] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00d8b4d9-36d0-49ea-988d-fce991742dca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.012724] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.072s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.013250] env[70020]: DEBUG nova.objects.instance [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'resources' on Instance uuid 08ce6bc8-30fe-4c63-80e1-26c84ae75702 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.021755] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 889.021755] env[70020]: value = "task-3618419" [ 889.021755] env[70020]: _type = "Task" [ 889.021755] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.032225] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618419, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.081110] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618418, 'name': ReconfigVM_Task, 'duration_secs': 0.195523} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.081404] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721598', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'name': 'volume-b709f316-53b7-4e6a-a871-7ecc3270770e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6f2bc97b-0f0a-4f16-b41c-7af96130783f', 'attached_at': '', 'detached_at': '', 'volume_id': 'b709f316-53b7-4e6a-a871-7ecc3270770e', 'serial': 'b709f316-53b7-4e6a-a871-7ecc3270770e'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 889.081687] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 889.082506] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea77882c-a272-45df-b921-d780738ebf1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.085537] env[70020]: INFO nova.compute.manager [-] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Took 1.29 seconds to deallocate network for instance. 
[ 889.093245] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.093779] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-445c6d18-3a1b-4e54-b4b5-28e37aa3601b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.182175] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 889.182535] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 889.182877] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Deleting the datastore file [datastore1] 6f2bc97b-0f0a-4f16-b41c-7af96130783f {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 889.183469] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0105d299-b73b-4ec6-a7df-e09970adfce8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.193837] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for the task: (returnval){ [ 889.193837] env[70020]: value = "task-3618421" [ 889.193837] env[70020]: _type = "Task" [ 889.193837] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.203873] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.366685] env[70020]: INFO nova.compute.manager [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Took 48.90 seconds to build instance. 
[ 889.514406] env[70020]: DEBUG nova.compute.utils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.515936] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.516129] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.531843] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618419, 'name': ReconfigVM_Task, 'duration_secs': 0.232391} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.532203] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 889.595332] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.602512] env[70020]: DEBUG nova.policy [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cee4ead0b96e49f2a95a3d74ce424942', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5ae2c1c42704f49854f86cca4f8a95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.713384] env[70020]: DEBUG oslo_vmware.api [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Task: {'id': task-3618421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157054} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.713641] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.713850] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.714024] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.796677] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 889.797206] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d4b4ea5-c02e-4fc9-bfd7-1ddb43b1f6a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.807963] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a36107-e669-4411-b988-5c289b06711e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.852883] env[70020]: ERROR nova.compute.manager [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Failed to detach volume b709f316-53b7-4e6a-a871-7ecc3270770e from /dev/sda: nova.exception.InstanceNotFound: Instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f could not be found. 
[ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Traceback (most recent call last): [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self.driver.rebuild(**kwargs) [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] raise NotImplementedError() [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] NotImplementedError [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] During handling of the above exception, another exception occurred: [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Traceback (most recent call last): [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self.driver.detach_volume(context, old_connection_info, [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] return self._volumeops.detach_volume(connection_info, instance) [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._detach_volume_vmdk(connection_info, instance) [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] stable_ref.fetch_moref(session) [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] raise exception.InstanceNotFound(instance_id=self._uuid) [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] nova.exception.InstanceNotFound: 
Instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f could not be found. [ 889.852883] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 889.870144] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.870144] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56430c07-d1e2-4adb-9c1b-7a58777c40d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.870940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-77879ba3-0426-46b3-aaa2-e728628616ab tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.677s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.878543] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 889.878543] env[70020]: value = "task-3618422" [ 889.878543] env[70020]: _type = "Task" [ 889.878543] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.889493] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.905946] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52672958-72d5-56b9-9f03-944bd6265763/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 889.907352] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49584dbf-5f65-4455-a43c-9c39f4ceaa6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.914725] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52672958-72d5-56b9-9f03-944bd6265763/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 889.915399] env[70020]: ERROR oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52672958-72d5-56b9-9f03-944bd6265763/disk-0.vmdk due to incomplete transfer. [ 889.915399] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fe9062f3-5f21-467c-b552-5fc0c401bb27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.925348] env[70020]: DEBUG oslo_vmware.rw_handles [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52672958-72d5-56b9-9f03-944bd6265763/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 889.925558] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Uploaded image 3e51d9c3-d3e3-4ae3-8ecf-3a0726817d70 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 889.928104] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 889.928370] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-094ef9aa-d41a-49be-90ec-f654d5998c07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.936714] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 889.936714] env[70020]: value = "task-3618423" [ 889.936714] env[70020]: _type = "Task" [ 889.936714] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.953916] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618423, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.966144] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Successfully created port: b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.019989] env[70020]: DEBUG nova.compute.utils [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Build of instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f aborted: Failed to rebuild volume backed instance. {{(pid=70020) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 890.021654] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 890.027828] env[70020]: ERROR nova.compute.manager [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f aborted: Failed to rebuild volume backed instance. [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Traceback (most recent call last): [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self.driver.rebuild(**kwargs) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] raise NotImplementedError() [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] NotImplementedError [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] During handling of the above exception, another exception occurred: [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Traceback (most recent call last): [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._detach_root_volume(context, instance, root_bdm) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 
6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] with excutils.save_and_reraise_exception(): [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self.force_reraise() [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] raise self.value [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self.driver.detach_volume(context, old_connection_info, [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] return self._volumeops.detach_volume(connection_info, instance) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._detach_volume_vmdk(connection_info, instance) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] stable_ref.fetch_moref(session) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] raise exception.InstanceNotFound(instance_id=self._uuid) [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] nova.exception.InstanceNotFound: Instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f could not be found. 
[ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] During handling of the above exception, another exception occurred: [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Traceback (most recent call last): [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] yield [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 890.027828] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._do_rebuild_instance_with_claim( [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._do_rebuild_instance( [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._rebuild_default_impl(**kwargs) [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] self._rebuild_volume_backed_instance( [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] raise exception.BuildAbortException( [ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] nova.exception.BuildAbortException: Build of instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f aborted: Failed to rebuild volume backed instance. 
[ 890.028858] env[70020]: ERROR nova.compute.manager [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] [ 890.039308] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T23:03:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8fe80bc1-98b9-4377-a5a8-72095e677071',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-537589333',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.039575] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.040286] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.040286] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.040404] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.040779] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.041035] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.041340] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.041529] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies 
{{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.041828] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.042333] env[70020]: DEBUG nova.virt.hardware [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.048251] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfiguring VM instance instance-00000038 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.051511] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90c071ed-4b07-4e1d-88ab-c2ff61c84567 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.085814] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 890.085814] env[70020]: value = "task-3618424" [ 890.085814] env[70020]: _type = "Task" [ 890.085814] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.102467] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618424, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.330986] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200b46f5-741f-40bb-8991-a9eea456abb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.341573] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Successfully created port: 926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.350221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f3abb9-797c-4f39-8d0e-03ab2aae3b0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.385441] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "d45966fe-98ff-4466-8e7e-90550034742f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.385724] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "d45966fe-98ff-4466-8e7e-90550034742f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.386012] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 890.394357] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7296c30a-8656-4cf1-9cd3-007ab734943c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.405999] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5e08a2-f10b-4a63-b1e0-ee757c7fe9dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.409964] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618422, 'name': PowerOffVM_Task, 'duration_secs': 0.209624} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.410888] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.411917] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b73bc21-d32e-4825-93af-3b4d9202053e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.422377] env[70020]: DEBUG nova.compute.provider_tree [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.444299] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f9e75-7aab-4551-9b0a-e33ba56dc59d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.457491] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618423, 'name': Destroy_Task, 'duration_secs': 0.404234} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.457944] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Destroyed the VM [ 890.458217] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 890.458464] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-de387f4b-379c-46de-ac56-5955684c9aaa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.466646] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 890.466646] env[70020]: value = "task-3618425" [ 890.466646] env[70020]: _type = "Task" [ 890.466646] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.481626] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618425, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.481626] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.481626] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4777a24c-8b19-4571-873f-4ab4f15e565e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.487082] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 890.487082] env[70020]: value = "task-3618426" [ 890.487082] env[70020]: _type = "Task" [ 890.487082] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.497228] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618426, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.596503] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618424, 'name': ReconfigVM_Task, 'duration_secs': 0.196907} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.596785] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfigured VM instance instance-00000038 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 890.597582] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cf6d02-4f8d-4428-b660-7a96dd04b7fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.621729] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.625028] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57fedd9d-dce8-4d66-9205-309a56a598e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.646964] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 890.646964] env[70020]: value = "task-3618427" [ 890.646964] env[70020]: _type = "Task" [ 890.646964] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.655520] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618427, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.911143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.925891] env[70020]: DEBUG nova.scheduler.client.report [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.977479] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618425, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.997746] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 890.997964] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.998235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.998382] env[70020]: DEBUG oslo_concurrency.lockutils [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.998699] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 
tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.998801] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23f5196f-02b3-417b-8c98-d65ddfa12ecf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.016612] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.016804] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.017596] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab7bca0d-e869-4cac-bc0e-20214190c887 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.023782] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 891.023782] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5219e018-303e-97db-46de-8f7fda1920d8" [ 891.023782] env[70020]: _type = "Task" [ 891.023782] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.033316] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5219e018-303e-97db-46de-8f7fda1920d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.041472] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 891.074831] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.075114] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.075247] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 
tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.076597] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.076820] env[70020]: DEBUG nova.virt.hardware [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.078648] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b313f63d-4930-425d-aabb-5d8a2574e132 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.088679] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c69d8c1-5dfe-4abc-a432-33ebb6b10093 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.162134] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618427, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.363748] env[70020]: DEBUG nova.compute.manager [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Received event network-changed-a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 891.364014] env[70020]: DEBUG nova.compute.manager [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Refreshing instance network info cache due to event network-changed-a59ccbd4-85b3-4a98-8407-29d65fea21f5. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 891.366148] env[70020]: DEBUG oslo_concurrency.lockutils [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] Acquiring lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.366201] env[70020]: DEBUG oslo_concurrency.lockutils [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] Acquired lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.366368] env[70020]: DEBUG nova.network.neutron [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Refreshing network info cache for port a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.436198] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.423s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.438969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.222s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.441043] env[70020]: INFO nova.compute.claims [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.469607] env[70020]: INFO nova.scheduler.client.report [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Deleted allocations for instance 08ce6bc8-30fe-4c63-80e1-26c84ae75702 [ 891.484882] env[70020]: DEBUG oslo_vmware.api [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618425, 'name': RemoveSnapshot_Task, 'duration_secs': 0.582256} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.484882] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 891.485016] env[70020]: INFO nova.compute.manager [None req-31b82cef-a7ec-49a8-9d80-749c9b4b103c tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Took 14.43 seconds to snapshot the instance on the hypervisor. [ 891.538065] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5219e018-303e-97db-46de-8f7fda1920d8, 'name': SearchDatastore_Task, 'duration_secs': 0.011146} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.539584] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ac1218-a26b-48db-b481-d92ec6fe6ec1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.555780] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 891.555780] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e30bea-6190-1a50-33e0-0c5f4b0a4067" [ 891.555780] env[70020]: _type = "Task" [ 891.555780] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.565664] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e30bea-6190-1a50-33e0-0c5f4b0a4067, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.667184] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618427, 'name': ReconfigVM_Task, 'duration_secs': 0.906917} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.667536] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfigured VM instance instance-00000038 to attach disk [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.668226] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 891.900227] env[70020]: DEBUG nova.compute.manager [req-c899bf26-dac7-474d-9a93-3273564f13fd req-1a953ade-fb2d-42a8-8ffe-5e8d245d7403 service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received event network-vif-plugged-b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 891.900509] env[70020]: DEBUG oslo_concurrency.lockutils [req-c899bf26-dac7-474d-9a93-3273564f13fd req-1a953ade-fb2d-42a8-8ffe-5e8d245d7403 service nova] Acquiring lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.900827] env[70020]: DEBUG oslo_concurrency.lockutils [req-c899bf26-dac7-474d-9a93-3273564f13fd req-1a953ade-fb2d-42a8-8ffe-5e8d245d7403 service nova] Lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.901276] env[70020]: DEBUG oslo_concurrency.lockutils [req-c899bf26-dac7-474d-9a93-3273564f13fd req-1a953ade-fb2d-42a8-8ffe-5e8d245d7403 service nova] Lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.901410] env[70020]: DEBUG nova.compute.manager [req-c899bf26-dac7-474d-9a93-3273564f13fd req-1a953ade-fb2d-42a8-8ffe-5e8d245d7403 service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] No waiting events found dispatching network-vif-plugged-b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 891.901580] env[70020]: WARNING nova.compute.manager [req-c899bf26-dac7-474d-9a93-3273564f13fd req-1a953ade-fb2d-42a8-8ffe-5e8d245d7403 service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received unexpected event network-vif-plugged-b41c227c-d57d-4d37-a05c-24351d6d22f1 for instance with vm_state building and task_state spawning. 
[ 891.981083] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b27d42c4-bc8f-41a2-a919-773374328e54 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "08ce6bc8-30fe-4c63-80e1-26c84ae75702" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.734s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.048563] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.068613] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e30bea-6190-1a50-33e0-0c5f4b0a4067, 'name': SearchDatastore_Task, 'duration_secs': 0.031599} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.069058] env[70020]: DEBUG oslo_concurrency.lockutils [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.072529] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. {{(pid=70020) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 892.072529] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-426d35c3-fe96-4570-88b4-9b77fac72e15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.080749] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 892.080749] env[70020]: value = "task-3618428" [ 892.080749] env[70020]: _type = "Task" [ 892.080749] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.093194] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618428, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.156586] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Successfully updated port: b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.179936] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c9ac25-75fc-45a6-a6dc-8c1a9c4428a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.205099] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2952478d-2bfb-4728-8848-2fa12f0404e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.226697] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 892.401839] env[70020]: DEBUG nova.network.neutron [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updated VIF entry in instance network info cache for port a59ccbd4-85b3-4a98-8407-29d65fea21f5. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.402356] env[70020]: DEBUG nova.network.neutron [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updating instance_info_cache with network_info: [{"id": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "address": "fa:16:3e:ae:ed:1c", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa59ccbd4-85", "ovs_interfaceid": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.592057] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf 
tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618428, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.798034] env[70020]: DEBUG nova.network.neutron [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Port ac1e36da-5de5-4451-a9e7-39165ab5f152 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 892.905452] env[70020]: DEBUG oslo_concurrency.lockutils [req-084b07d2-dc8b-4f51-97a8-420a3c37d25c req-3151a898-7d68-43e8-8668-d14b2c4295b1 service nova] Releasing lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.094760] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67da0c66-fc8c-4528-94a4-eeceb3850cea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.100901] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519391} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.101871] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. 
[ 893.102652] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dd4c78-3a64-4dc9-9645-87d02074959f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.108431] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a9bf15-694d-47c5-b62f-708ce9fcb598 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.132198] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.133056] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4c37689-cbcd-492e-92d4-38a6f0eca729 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.174750] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0d30ec-c0f3-435b-88af-05627a745e97 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.181050] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 893.181050] env[70020]: value = "task-3618429" [ 893.181050] env[70020]: _type = "Task" [ 893.181050] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.187021] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2006e1-9038-4dc4-9fce-46a38917e5f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.198328] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618429, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.206570] env[70020]: DEBUG nova.compute.provider_tree [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.501212] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.504474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.504474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.504474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.504474] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.505643] env[70020]: INFO nova.compute.manager [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Terminating instance [ 893.696822] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618429, 'name': ReconfigVM_Task, 'duration_secs': 0.303024} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.697435] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.700172] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e15f165-65ff-471e-a8b8-372f07388524 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.724408] env[70020]: DEBUG nova.scheduler.client.report [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.737391] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcc38731-2c57-4585-94f3-56daea827ec3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.759226] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 893.759226] env[70020]: value = "task-3618430" [ 893.759226] env[70020]: _type = "Task" [ 893.759226] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.768220] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618430, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.840460] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.842406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.842406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.997984] env[70020]: DEBUG nova.compute.manager [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received event network-changed-b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 893.998367] env[70020]: DEBUG nova.compute.manager [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Refreshing instance network info cache due to event network-changed-b41c227c-d57d-4d37-a05c-24351d6d22f1. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 893.998740] env[70020]: DEBUG oslo_concurrency.lockutils [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] Acquiring lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.999849] env[70020]: DEBUG oslo_concurrency.lockutils [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] Acquired lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.999849] env[70020]: DEBUG nova.network.neutron [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Refreshing network info cache for port b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.012915] env[70020]: DEBUG nova.compute.manager [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 894.012915] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a46ffed-fe03-4e5c-af93-b89fe96b62e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.022067] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f035b22a-4517-4b40-a53f-27a30074a73e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.062082] env[70020]: WARNING nova.virt.vmwareapi.driver [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f could not be found. [ 894.062402] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.062826] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75b9feb8-112f-4243-a1d1-067f6e9a9c7e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.072529] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109949a6-e249-40cf-a293-4336ba121fd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.116290] env[70020]: WARNING nova.virt.vmwareapi.vmops [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f2bc97b-0f0a-4f16-b41c-7af96130783f could not be found. [ 894.116610] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.117806] env[70020]: INFO nova.compute.manager [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Took 0.11 seconds to destroy the instance on the hypervisor. [ 894.118158] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.118491] env[70020]: DEBUG nova.compute.manager [-] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 894.118749] env[70020]: DEBUG nova.network.neutron [-] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.238457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.799s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.238949] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 894.241571] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 42.659s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.277318] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618430, 'name': ReconfigVM_Task, 'duration_secs': 0.154506} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.277732] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.278061] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db9bff0f-0d32-4693-bc00-9ba53968e08c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.285731] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 894.285731] env[70020]: value = "task-3618431" [ 894.285731] env[70020]: _type = "Task" [ 894.285731] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.295963] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618431, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.478373] env[70020]: DEBUG nova.compute.manager [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.479294] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f044519-9863-48fe-b291-634dfefe4685 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.550526] env[70020]: DEBUG nova.compute.manager [req-e175c5de-512c-4118-9c76-500ce514733f req-a506edaf-f08c-4677-adff-63447b7f18ac service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received event network-vif-plugged-926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 894.550887] env[70020]: DEBUG oslo_concurrency.lockutils [req-e175c5de-512c-4118-9c76-500ce514733f req-a506edaf-f08c-4677-adff-63447b7f18ac service nova] Acquiring lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.552568] env[70020]: DEBUG oslo_concurrency.lockutils [req-e175c5de-512c-4118-9c76-500ce514733f req-a506edaf-f08c-4677-adff-63447b7f18ac service nova] Lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.552759] env[70020]: DEBUG oslo_concurrency.lockutils [req-e175c5de-512c-4118-9c76-500ce514733f req-a506edaf-f08c-4677-adff-63447b7f18ac service nova] Lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.552982] env[70020]: DEBUG nova.compute.manager [req-e175c5de-512c-4118-9c76-500ce514733f req-a506edaf-f08c-4677-adff-63447b7f18ac service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] No waiting events found dispatching network-vif-plugged-926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.553170] env[70020]: WARNING nova.compute.manager [req-e175c5de-512c-4118-9c76-500ce514733f req-a506edaf-f08c-4677-adff-63447b7f18ac service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received unexpected event network-vif-plugged-926fd7e2-c562-49a2-9146-1c38fc20e80a for instance with vm_state building and task_state spawning. [ 894.581155] env[70020]: DEBUG nova.network.neutron [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.600742] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Successfully updated port: 926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.730330] env[70020]: DEBUG nova.network.neutron [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.767326] env[70020]: DEBUG nova.compute.utils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 894.769324] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.769501] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.798068] env[70020]: DEBUG oslo_vmware.api [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618431, 'name': PowerOnVM_Task, 'duration_secs': 0.505085} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.798954] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.801841] env[70020]: DEBUG nova.compute.manager [None req-85f77789-5f7f-420c-8bfd-143cda97babf tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.802959] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885d67b6-99ed-4810-8977-60df716983b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.859647] env[70020]: DEBUG nova.policy [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9be1a256e3b49f7a93dad4d718d7deb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19128323d60a4992b0a2f837317d3f04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.931059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.931255] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.931432] env[70020]: DEBUG nova.network.neutron [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.996242] env[70020]: INFO nova.compute.manager [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] instance snapshotting [ 894.999688] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d392d6-f10e-4a2b-9649-baa8a81af9f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.027853] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5abb7e6-c692-4520-be93-63bc1fbaf537 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.111917] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.208049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.208049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.235519] env[70020]: DEBUG oslo_concurrency.lockutils [req-1afe0af8-6756-4b80-b190-4bcf5136ffab req-5bd5d8c0-4cba-4516-909d-6f013049df8b service nova] Releasing lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.235886] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.236061] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.269189] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance b53f55c1-1867-410c-9c53-f552ff30d697 as it has an incoming, in-progress migration b5628a9f-1bd9-44da-91e8-035e91b65f82. Migration status is post-migrating {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 895.272164] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating resource usage from migration b5628a9f-1bd9-44da-91e8-035e91b65f82 [ 895.274863] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 895.300509] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ea97f6ab-057e-44d3-835a-68b46d241621 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.300509] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance bc57657e-99e8-46b8-9731-ddd4864a3114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.300662] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 0caa6acd-29d4-43ee-8b32-5149462dfc1c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 895.300793] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8adadb2e-2a20-45b1-bed8-34e09df25f39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.300912] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 4b5750d4-98ec-4c70-b214-fad97060b606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.301235] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.301235] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 16c45b86-317a-4d0c-a402-51c85af37a5b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 895.301348] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c9ce57f3-f9a2-40aa-b7eb-403840c34304 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.301383] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 2198e7f8-5458-4b97-abb3-0a3c932cebc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.301523] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f56e88f6-3a25-44d9-bdb1-cc4291169c9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.301600] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f16d60a4-5f80-4f41-b994-068de48775ad is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 895.301709] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 5c216231-afc5-41df-a243-bb2a17c20bfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.301826] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 895.301932] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ef0d716a-080e-4167-bd34-b2c660b95c88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.302163] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8bff6907-c2b0-4ad1-9298-b2d622d33fde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.302337] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 24184767-92f7-48b3-bbad-16a596ececde is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 895.303675] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ef85421b-b679-4f38-b052-5695baa2e405 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.303675] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 4335f92a-897a-4779-be70-4f0754a66d53 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.303675] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1d9218db-05d8-4e33-837f-e9865946237f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 895.303675] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d3dbc3d1-bba7-4803-bacb-02de27a6a4ff is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 895.303675] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 422ca332-5952-443c-a22e-67b1b45df5b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.303675] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.303675] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 00232eca-da03-49ea-b62b-d9721739b0ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.303675] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 61875dcc-5b76-409b-987f-4ae875909257 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 895.310127] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Successfully created port: 6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.493945] env[70020]: DEBUG nova.network.neutron [-] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.543106] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 895.543495] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a1617822-3df3-495c-b8af-703e708a179b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.556223] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 895.556223] env[70020]: value = "task-3618432" [ 895.556223] env[70020]: _type = "Task" [ 895.556223] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.563350] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618432, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.809165] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d65ab5e0-189c-43e1-accf-16248ad02852 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 895.957024] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.996522] env[70020]: INFO nova.compute.manager [-] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Took 1.88 seconds to deallocate network for instance. 
[ 896.010883] env[70020]: DEBUG nova.network.neutron [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.072961] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618432, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.158967] env[70020]: DEBUG nova.compute.manager [req-fe70b1d3-1385-45c8-8951-34d3e035e1bf req-dd4bd2cc-a23a-4cc6-a3fb-bf235faf0158 service nova] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Received event network-vif-deleted-c6bd89ff-30f7-46ff-b392-16a88577740a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.285307] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 896.313325] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.313565] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.313717] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.313927] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.314113] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.314264] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.314471] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.314626] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 896.314931] env[70020]: DEBUG nova.virt.hardware [None 
req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.314931] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.315525] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.316954] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8317f386-44d0-4b1b-8590-d0336fafac21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 896.317099] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration b5628a9f-1bd9-44da-91e8-035e91b65f82 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 896.317230] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b53f55c1-1867-410c-9c53-f552ff30d697 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 896.319477] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3eb407-d4af-4b21-878b-9beaa809c3e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.332859] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba44653-2d7b-4aba-b1e2-60b49fd4f8c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.442665] env[70020]: DEBUG nova.network.neutron [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updating instance_info_cache with network_info: [{"id": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "address": "fa:16:3e:0e:d5:b6", "network": {"id": "20e57d2a-47de-4fcb-a6e9-5a7f29bb8ffe", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1648505199", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb41c227c-d5", "ovs_interfaceid": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "926fd7e2-c562-49a2-9146-1c38fc20e80a", "address": "fa:16:3e:97:f4:c5", "network": {"id": "16a0952a-d079-4e3f-8142-7471db7485ff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1076788276", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926fd7e2-c5", "ovs_interfaceid": "926fd7e2-c562-49a2-9146-1c38fc20e80a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.515304] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 
tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.551302] env[70020]: INFO nova.compute.manager [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Took 0.55 seconds to detach 1 volumes for instance. [ 896.554848] env[70020]: DEBUG nova.compute.manager [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Deleting volume: b709f316-53b7-4e6a-a871-7ecc3270770e {{(pid=70020) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 896.570917] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618432, 'name': CreateSnapshot_Task, 'duration_secs': 0.547778} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.571233] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 896.571998] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db242bcd-b481-4952-8450-0eaebd0b3dff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.681955] env[70020]: DEBUG nova.compute.manager [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received event network-changed-926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.681955] env[70020]: DEBUG nova.compute.manager [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Refreshing instance network info cache due to event network-changed-926fd7e2-c562-49a2-9146-1c38fc20e80a. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 896.681955] env[70020]: DEBUG oslo_concurrency.lockutils [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] Acquiring lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.824614] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance a8982c31-ea86-4a8d-b8c6-006263ef41f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 896.956021] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Releasing lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.956021] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Instance network_info: |[{"id": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "address": "fa:16:3e:0e:d5:b6", "network": {"id": "20e57d2a-47de-4fcb-a6e9-5a7f29bb8ffe", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1648505199", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb41c227c-d5", "ovs_interfaceid": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "926fd7e2-c562-49a2-9146-1c38fc20e80a", "address": "fa:16:3e:97:f4:c5", "network": {"id": "16a0952a-d079-4e3f-8142-7471db7485ff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1076788276", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926fd7e2-c5", "ovs_interfaceid": "926fd7e2-c562-49a2-9146-1c38fc20e80a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.956021] env[70020]: DEBUG oslo_concurrency.lockutils [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] Acquired lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.956021] env[70020]: DEBUG 
nova.network.neutron [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Refreshing network info cache for port 926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.956021] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:d5:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b41c227c-d57d-4d37-a05c-24351d6d22f1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:f4:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60e7ee7b-4d02-4d68-af2e-5ab7d9708120', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '926fd7e2-c562-49a2-9146-1c38fc20e80a', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.977117] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.977532] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.979511] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe2a3764-5d2d-4751-943f-fef91978dca9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.012966] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.012966] env[70020]: value = "task-3618434" [ 897.012966] env[70020]: _type = "Task" [ 897.012966] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.024962] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618434, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.047743] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a14f18b-6b34-41cc-b511-1915a1689d75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.066630] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176526e0-868e-41f0-81b0-a0484376f8b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.078175] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 897.093027] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 897.094603] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f9547cc5-7b08-45f5-9f4d-d5ad93b420cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.106825] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 897.106825] env[70020]: value = "task-3618435" [ 897.106825] env[70020]: _type = "Task" [ 897.106825] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.111208] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.118178] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618435, 'name': CloneVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.212500] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Successfully updated port: 6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.328553] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 45926a02-d0fe-4274-ba47-b97b3e12e4cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 897.527374] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618434, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.586457] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.586777] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da2c213c-57ce-49bd-9fa5-abd7c2def267 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.595542] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 897.595542] env[70020]: value = "task-3618436" [ 897.595542] env[70020]: _type = "Task" [ 897.595542] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.604934] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.615443] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618435, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.721384] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "refresh_cache-61875dcc-5b76-409b-987f-4ae875909257" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.721574] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "refresh_cache-61875dcc-5b76-409b-987f-4ae875909257" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.721787] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.836122] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 897.911979] env[70020]: DEBUG nova.network.neutron [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updated VIF entry in instance network info cache for port 926fd7e2-c562-49a2-9146-1c38fc20e80a. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.912606] env[70020]: DEBUG nova.network.neutron [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updating instance_info_cache with network_info: [{"id": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "address": "fa:16:3e:0e:d5:b6", "network": {"id": "20e57d2a-47de-4fcb-a6e9-5a7f29bb8ffe", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1648505199", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb41c227c-d5", "ovs_interfaceid": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "926fd7e2-c562-49a2-9146-1c38fc20e80a", "address": "fa:16:3e:97:f4:c5", "network": {"id": "16a0952a-d079-4e3f-8142-7471db7485ff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1076788276", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60e7ee7b-4d02-4d68-af2e-5ab7d9708120", "external-id": "nsx-vlan-transportzone-550", "segmentation_id": 550, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926fd7e2-c5", "ovs_interfaceid": "926fd7e2-c562-49a2-9146-1c38fc20e80a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.024952] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618434, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.106925] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618436, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.117875] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618435, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.258900] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.336148] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance edef9245-4048-4ea4-90cc-ebed54498d88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.411720] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Updating instance_info_cache with network_info: [{"id": "6d51b5bb-0659-4302-96ab-4991ce36e722", "address": "fa:16:3e:ed:3c:5e", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d51b5bb-06", "ovs_interfaceid": "6d51b5bb-0659-4302-96ab-4991ce36e722", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.416209] env[70020]: DEBUG oslo_concurrency.lockutils [req-df936dfa-7d4b-4d9b-8a30-ee1c3f6cbab3 req-26364d6e-d420-46f9-b24b-67a064d48a2b service nova] Releasing lock "refresh_cache-00232eca-da03-49ea-b62b-d9721739b0ec" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.462437] env[70020]: DEBUG nova.compute.manager [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Received event network-vif-plugged-6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 898.462917] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] Acquiring lock "61875dcc-5b76-409b-987f-4ae875909257-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.463310] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] Lock "61875dcc-5b76-409b-987f-4ae875909257-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.463606] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] Lock "61875dcc-5b76-409b-987f-4ae875909257-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.463761] env[70020]: DEBUG nova.compute.manager [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] No waiting events found dispatching network-vif-plugged-6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 898.463940] env[70020]: WARNING nova.compute.manager [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Received unexpected event network-vif-plugged-6d51b5bb-0659-4302-96ab-4991ce36e722 for instance with vm_state building and task_state spawning. [ 898.464301] env[70020]: DEBUG nova.compute.manager [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Received event network-changed-6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 898.464446] env[70020]: DEBUG nova.compute.manager [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Refreshing instance network info cache due to event network-changed-6d51b5bb-0659-4302-96ab-4991ce36e722. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 898.464607] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] Acquiring lock "refresh_cache-61875dcc-5b76-409b-987f-4ae875909257" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.525696] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618434, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.607864] env[70020]: DEBUG oslo_vmware.api [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618436, 'name': PowerOnVM_Task, 'duration_secs': 0.715788} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.608159] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.608350] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6726d354-5693-43d1-bb27-4c9920929ded tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance 'b53f55c1-1867-410c-9c53-f552ff30d697' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 898.623438] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618435, 'name': CloneVM_Task, 'duration_secs': 1.489883} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.623693] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Created linked-clone VM from snapshot [ 898.624451] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a1df94-b258-480e-bc92-e23fd6515fbf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.632591] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Uploading image e6dff619-7e86-49bd-a945-dfebd1e75264 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 898.668378] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 898.668378] env[70020]: value = "vm-721718" [ 898.668378] env[70020]: _type = "VirtualMachine" [ 898.668378] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 898.668918] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3e0fa2f9-69d6-4cf6-9049-5800d40a283f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.677776] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lease: (returnval){ [ 898.677776] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bc2f6-8b99-4d3a-f347-9e7c48653fbb" [ 898.677776] env[70020]: _type = "HttpNfcLease" [ 898.677776] env[70020]: } obtained for exporting VM: (result){ [ 898.677776] env[70020]: value = "vm-721718" [ 898.677776] env[70020]: _type = "VirtualMachine" [ 898.677776] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 898.678260] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the lease: (returnval){ [ 898.678260] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bc2f6-8b99-4d3a-f347-9e7c48653fbb" [ 898.678260] env[70020]: _type = "HttpNfcLease" [ 898.678260] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 898.685863] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 898.685863] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bc2f6-8b99-4d3a-f347-9e7c48653fbb" [ 898.685863] env[70020]: _type = "HttpNfcLease" [ 898.685863] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 898.839250] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ff4e958d-0068-429f-af76-5e7d4dd147f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.914635] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "refresh_cache-61875dcc-5b76-409b-987f-4ae875909257" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.914993] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Instance network_info: |[{"id": "6d51b5bb-0659-4302-96ab-4991ce36e722", "address": "fa:16:3e:ed:3c:5e", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d51b5bb-06", "ovs_interfaceid": "6d51b5bb-0659-4302-96ab-4991ce36e722", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 898.915382] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] Acquired lock "refresh_cache-61875dcc-5b76-409b-987f-4ae875909257" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.915559] env[70020]: DEBUG nova.network.neutron [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Refreshing network info cache for port 6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.916838] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:3c:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d51b5bb-0659-4302-96ab-4991ce36e722', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.925755] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 
tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 898.926530] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.926760] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-143036b9-ab87-41b2-ba91-74afb1e487ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.947342] env[70020]: DEBUG nova.compute.manager [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 898.947775] env[70020]: DEBUG nova.compute.manager [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing instance network info cache due to event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 898.948037] env[70020]: DEBUG oslo_concurrency.lockutils [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.948168] env[70020]: DEBUG oslo_concurrency.lockutils [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.948333] env[70020]: DEBUG nova.network.neutron [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.956960] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.956960] env[70020]: value = "task-3618438" [ 898.956960] env[70020]: _type = "Task" [ 898.956960] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.969951] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618438, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.031771] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618434, 'name': CreateVM_Task, 'duration_secs': 1.792493} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.032062] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.033240] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.033492] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.033998] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.034340] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45d093f7-68aa-49fe-bea7-b9c0212c6414 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.041939] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 899.041939] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52158b74-65e8-6adb-f672-443ed1d4196c" [ 899.041939] env[70020]: _type = "Task" [ 899.041939] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.056015] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52158b74-65e8-6adb-f672-443ed1d4196c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.187805] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.187805] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bc2f6-8b99-4d3a-f347-9e7c48653fbb" [ 899.187805] env[70020]: _type = "HttpNfcLease" [ 899.187805] env[70020]: } is ready. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 899.188105] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 899.188105] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527bc2f6-8b99-4d3a-f347-9e7c48653fbb" [ 899.188105] env[70020]: _type = "HttpNfcLease" [ 899.188105] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 899.188826] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f8fb3e-4cbd-49b9-92f5-ebfb22c57b5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.197122] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52407c68-a959-fc5d-fcb4-a1640e206c72/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 899.197300] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52407c68-a959-fc5d-fcb4-a1640e206c72/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 899.309829] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6628e555-39b7-4819-991d-357cde0da6ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.348308] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 3a4f2342-58e7-436b-a779-0fa093b52409 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 899.475207] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618438, 'name': CreateVM_Task, 'duration_secs': 0.480932} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.475413] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.476142] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.560018] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52158b74-65e8-6adb-f672-443ed1d4196c, 'name': SearchDatastore_Task, 'duration_secs': 0.013696} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.560018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.560018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.560018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.560018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.560877] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.561428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.562617] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.563700] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de366a84-8c5d-4a65-b982-c0e5fd32153b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.569028] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46ecd089-e9c7-4ceb-8ba3-0ea5fa031428 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.579302] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 899.579302] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ed611c-7689-74d9-c854-8e78361be3a6" [ 899.579302] env[70020]: _type = "Task" [ 899.579302] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.584787] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.585194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.589294] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2789c805-e6fa-41e7-954b-c3a418230d8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.592336] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ed611c-7689-74d9-c854-8e78361be3a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.599339] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 899.599339] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525c39cf-8f0f-3621-cbd5-0354599df5fe" [ 899.599339] env[70020]: _type = "Task" [ 899.599339] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.609029] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525c39cf-8f0f-3621-cbd5-0354599df5fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.852433] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9dec24d6-af8a-41b9-920c-e4420fc69417 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 899.949410] env[70020]: DEBUG nova.network.neutron [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updated VIF entry in instance network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.949889] env[70020]: DEBUG nova.network.neutron [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.018017] env[70020]: DEBUG nova.network.neutron [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Updated VIF entry in instance network info cache for port 6d51b5bb-0659-4302-96ab-4991ce36e722. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.018017] env[70020]: DEBUG nova.network.neutron [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Updating instance_info_cache with network_info: [{"id": "6d51b5bb-0659-4302-96ab-4991ce36e722", "address": "fa:16:3e:ed:3c:5e", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d51b5bb-06", "ovs_interfaceid": "6d51b5bb-0659-4302-96ab-4991ce36e722", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.093412] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ed611c-7689-74d9-c854-8e78361be3a6, 'name': SearchDatastore_Task, 'duration_secs': 0.017319} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.095960] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.096243] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.096644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.113648] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525c39cf-8f0f-3621-cbd5-0354599df5fe, 'name': SearchDatastore_Task, 'duration_secs': 0.011385} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.114657] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4290d2a8-66f5-468c-99b3-527af5e2eed8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.121655] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 900.121655] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cf9c48-82c1-02cc-1acd-c26e3bc7d289" [ 900.121655] env[70020]: _type = "Task" [ 900.121655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.138349] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cf9c48-82c1-02cc-1acd-c26e3bc7d289, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.365129] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance abc194e3-fb6a-4f2a-8886-e2777530a2a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 900.456670] env[70020]: DEBUG oslo_concurrency.lockutils [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.457340] env[70020]: DEBUG nova.compute.manager [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 900.457649] env[70020]: DEBUG nova.compute.manager [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing instance network info cache due to event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 900.457972] env[70020]: DEBUG oslo_concurrency.lockutils [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.458159] env[70020]: DEBUG oslo_concurrency.lockutils [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.458391] env[70020]: DEBUG nova.network.neutron [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.519086] env[70020]: DEBUG oslo_concurrency.lockutils [req-7d25435e-d132-47a0-b527-5110fbb93a8e req-7069280a-d154-49ea-8ba6-8a57a4437147 service nova] Releasing lock "refresh_cache-61875dcc-5b76-409b-987f-4ae875909257" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.635531] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cf9c48-82c1-02cc-1acd-c26e3bc7d289, 'name': SearchDatastore_Task, 'duration_secs': 0.020437} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.636344] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.636766] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 00232eca-da03-49ea-b62b-d9721739b0ec/00232eca-da03-49ea-b62b-d9721739b0ec.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.637401] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.637706] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.638086] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-280ea803-89e6-43bd-8592-38ce6be0ce68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.640838] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5edf1aed-538e-4014-bcba-54407cbe65f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.654051] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 900.654051] env[70020]: value = "task-3618439" [ 900.654051] env[70020]: _type = "Task" [ 900.654051] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.658034] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.658034] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.661594] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a3f8ace-168d-4eeb-b29a-5494907e4f67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.672587] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618439, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.672964] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 900.672964] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523f36fc-8f3c-3d05-0771-900718ab8007" [ 900.672964] env[70020]: _type = "Task" [ 900.672964] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.682294] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523f36fc-8f3c-3d05-0771-900718ab8007, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.871684] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 2ccd34c8-b433-41be-b800-d06a0595bff9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.171103] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618439, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.187894] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523f36fc-8f3c-3d05-0771-900718ab8007, 'name': SearchDatastore_Task, 'duration_secs': 0.012239} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.191488] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-264f5855-5f44-45f1-b499-9b86572081e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.199685] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 901.199685] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521ac047-ff88-a6c7-56a9-30b3b1ea2890" [ 901.199685] env[70020]: _type = "Task" [ 901.199685] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.212827] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521ac047-ff88-a6c7-56a9-30b3b1ea2890, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.335378] env[70020]: DEBUG nova.network.neutron [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updated VIF entry in instance network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.335939] env[70020]: DEBUG nova.network.neutron [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.376899] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d45966fe-98ff-4466-8e7e-90550034742f has been scheduled to this compute host, the scheduler has made an allocation against this 
compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.646059] env[70020]: DEBUG nova.compute.manager [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.646285] env[70020]: DEBUG nova.compute.manager [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing instance network info cache due to event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 901.646474] env[70020]: DEBUG oslo_concurrency.lockutils [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.671911] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618439, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655611} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.672202] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 00232eca-da03-49ea-b62b-d9721739b0ec/00232eca-da03-49ea-b62b-d9721739b0ec.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.672416] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.672676] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f318fb9f-bb83-48ae-bc97-75b47a0171ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.681871] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 901.681871] env[70020]: value = "task-3618440" [ 901.681871] env[70020]: _type = "Task" [ 901.681871] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.693592] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618440, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.714244] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521ac047-ff88-a6c7-56a9-30b3b1ea2890, 'name': SearchDatastore_Task, 'duration_secs': 0.055798} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.714581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.714896] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 61875dcc-5b76-409b-987f-4ae875909257/61875dcc-5b76-409b-987f-4ae875909257.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.715154] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff8cdfbd-9f86-4682-9b30-e01a8ced1af6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.725240] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 901.725240] env[70020]: value = "task-3618441" [ 901.725240] env[70020]: _type = "Task" [ 901.725240] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.736245] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618441, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.843380] env[70020]: DEBUG oslo_concurrency.lockutils [req-5bcc1510-198b-454d-ac15-7342a8bd7de9 req-836833c4-d3fa-4dd1-8e3e-e22239efc6e0 service nova] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.843904] env[70020]: DEBUG oslo_concurrency.lockutils [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.844171] env[70020]: DEBUG nova.network.neutron [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.883771] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance da07cb36-244f-4f48-a5b6-8d00324c1edf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.883771] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 901.883771] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4480MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 902.199995] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076664} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.204052] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.205510] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeef27e5-a432-4f6f-a26a-23345edacb92 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.235999] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 00232eca-da03-49ea-b62b-d9721739b0ec/00232eca-da03-49ea-b62b-d9721739b0ec.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.244162] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79ac251d-8620-4845-b7e6-d58f388c918e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.271090] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618441, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.272771] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 902.272771] env[70020]: value = "task-3618442" [ 902.272771] env[70020]: _type = "Task" [ 902.272771] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.287970] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618442, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.578101] env[70020]: DEBUG nova.network.neutron [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updated VIF entry in instance network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.578529] env[70020]: DEBUG nova.network.neutron [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.626805] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a60fceb-daea-4664-a308-36183f81d4c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.636143] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28093cf-a8c2-4574-9873-0217cb351184 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.674185] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc45d15-c58f-42d6-ab06-9c150221f9ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.683227] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9a0dc1-8369-4b04-875e-56c60a6858e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.701138] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.744760] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618441, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719959} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.745129] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 61875dcc-5b76-409b-987f-4ae875909257/61875dcc-5b76-409b-987f-4ae875909257.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.745456] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.745802] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48d04e45-76e1-4e61-b405-ae7a49138ede {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.755686] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 902.755686] env[70020]: value = "task-3618443" [ 902.755686] env[70020]: _type = "Task" [ 902.755686] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.770264] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618443, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.788490] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618442, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.877870] env[70020]: DEBUG nova.network.neutron [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Port ac1e36da-5de5-4451-a9e7-39165ab5f152 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 902.878763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.878763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.878763] env[70020]: DEBUG nova.network.neutron [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.083980] env[70020]: DEBUG oslo_concurrency.lockutils [req-9f9d8e7f-4615-4fe8-bd86-f90d36d0e16f req-f49259ba-9af8-47e4-b787-66bd3426df1e service nova] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.205131] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.268327] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618443, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079149} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.268637] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.269516] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1f467d-834e-4b53-a0d7-7d4f317236ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.297970] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 61875dcc-5b76-409b-987f-4ae875909257/61875dcc-5b76-409b-987f-4ae875909257.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.298854] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f2ec3ff-8973-486d-a6bc-2abd469b3d49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.317282] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618442, 'name': ReconfigVM_Task, 'duration_secs': 0.523487} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.317934] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 00232eca-da03-49ea-b62b-d9721739b0ec/00232eca-da03-49ea-b62b-d9721739b0ec.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.318653] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-035f5580-e7b1-48f8-9f2f-613e4437bfe9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.327025] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 903.327025] env[70020]: value = "task-3618444" [ 903.327025] env[70020]: _type = "Task" [ 903.327025] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.328974] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 903.328974] env[70020]: value = "task-3618445" [ 903.328974] env[70020]: _type = "Task" [ 903.328974] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.336046] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618444, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.342208] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618445, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.638924] env[70020]: DEBUG nova.network.neutron [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.710544] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 903.710950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.469s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.711368] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.967s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.717598] env[70020]: INFO nova.compute.claims [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b 
tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.731059] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 903.731059] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Cleaning up deleted instances {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 903.851240] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.856421] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618445, 'name': Rename_Task, 'duration_secs': 0.202218} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.856886] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.857276] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95f08fb2-243c-4e3a-a2e7-b26658708f07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.869667] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 903.869667] env[70020]: value = "task-3618446" [ 903.869667] env[70020]: _type = "Task" [ 903.869667] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.883746] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618446, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.096359] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.097106] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.103396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.103888] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.142025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.181814] env[70020]: DEBUG nova.compute.manager [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.182120] env[70020]: DEBUG nova.compute.manager [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing instance network info cache due to event network-changed-759215b2-ed99-4281-9bf0-fb9379eab835. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 904.182452] env[70020]: DEBUG oslo_concurrency.lockutils [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] Acquiring lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.185035] env[70020]: DEBUG oslo_concurrency.lockutils [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] Acquired lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.185035] env[70020]: DEBUG nova.network.neutron [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Refreshing network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.248193] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] There are 42 instances to clean {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 904.249017] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 38839949-c717-4f0b-97a7-108d87417b88] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.338571] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618444, 'name': ReconfigVM_Task, 'duration_secs': 0.564929} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.338919] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 61875dcc-5b76-409b-987f-4ae875909257/61875dcc-5b76-409b-987f-4ae875909257.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.339591] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a922125-8eaa-4a9f-95fa-2dfa309c28fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.349713] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 904.349713] env[70020]: value = "task-3618447" [ 904.349713] env[70020]: _type = "Task" [ 904.349713] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.359966] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618447, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.381148] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618446, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.645345] env[70020]: DEBUG nova.compute.manager [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=70020) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 904.645807] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.752634] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 29d41731-4ae2-4cc4-bfda-b7356922c8ff] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.768833] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "422ca332-5952-443c-a22e-67b1b45df5b9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.769825] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "422ca332-5952-443c-a22e-67b1b45df5b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.770195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "422ca332-5952-443c-a22e-67b1b45df5b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.770816] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "422ca332-5952-443c-a22e-67b1b45df5b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.771016] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "422ca332-5952-443c-a22e-67b1b45df5b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.781433] env[70020]: INFO nova.compute.manager [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Terminating instance [ 904.868241] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618447, 'name': Rename_Task, 'duration_secs': 0.301984} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.868562] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.868831] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f09fe4bc-ff3f-43bc-ada5-5e432607358e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.883391] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 904.883391] env[70020]: value = "task-3618448" [ 904.883391] env[70020]: _type = "Task" [ 904.883391] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.889148] env[70020]: DEBUG oslo_vmware.api [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618446, 'name': PowerOnVM_Task, 'duration_secs': 0.992371} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.897623] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.897778] env[70020]: INFO nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Took 13.86 seconds to spawn the instance on the hypervisor. 
[ 904.899084] env[70020]: DEBUG nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.899269] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52507cec-7410-4594-82f8-5b9ab577f1af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.917486] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618448, 'name': PowerOnVM_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.955278] env[70020]: DEBUG nova.network.neutron [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updated VIF entry in instance network info cache for port 759215b2-ed99-4281-9bf0-fb9379eab835. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 904.956802] env[70020]: DEBUG nova.network.neutron [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [{"id": "759215b2-ed99-4281-9bf0-fb9379eab835", "address": "fa:16:3e:4a:93:7c", "network": {"id": "f1c779c8-1ed2-48d1-8344-30e93bf527ed", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-625584571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "32f7008f815f482f992ddbc4906664b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759215b2-ed", "ovs_interfaceid": "759215b2-ed99-4281-9bf0-fb9379eab835", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.257238] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f7a42358-f26a-4651-a929-d3836f050648] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.289040] env[70020]: DEBUG nova.compute.manager [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 905.289040] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.289040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9634887a-7496-46b7-b040-c0532fe51984 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.303290] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.303604] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc989e0c-25b5-4213-8d1a-8de5e90f4498 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.312527] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 905.312527] env[70020]: value = "task-3618449" [ 905.312527] env[70020]: _type = "Task" [ 905.312527] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.328654] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.399606] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618448, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.414546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffd9e8b-e333-4ef0-8990-5ef8234f7d13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.423452] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ff0d51-09bd-4350-a7b6-d4dfd00749cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.472157] env[70020]: DEBUG oslo_concurrency.lockutils [req-9d9d372b-8ec8-4cc9-89cd-2608ca12291a req-ab76205a-a6e0-4654-b0e8-3f8c6375104b service nova] Releasing lock "refresh_cache-422ca332-5952-443c-a22e-67b1b45df5b9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.473431] env[70020]: INFO nova.compute.manager [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Took 57.63 seconds to build instance. [ 905.475173] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06d7f95-afe7-4429-9f63-b3a2925f1966 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.484688] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbf6401-4e9f-4907-bc88-a854ea7a5184 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.501682] env[70020]: DEBUG nova.compute.provider_tree [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.761661] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: b99195a6-866e-4142-970a-42a0564889ef] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.829812] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618449, 'name': PowerOffVM_Task, 'duration_secs': 0.424707} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.830837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.831121] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.831394] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48cc71db-2f87-41d2-9242-aa0f37ea55b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.899938] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618448, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.905135] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.905392] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.905578] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Deleting the datastore file [datastore2] 422ca332-5952-443c-a22e-67b1b45df5b9 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.905895] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-121317a2-5d6e-4121-b6a4-833f59892fc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.914034] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for the task: (returnval){ [ 905.914034] env[70020]: value = "task-3618451" [ 905.914034] env[70020]: _type = "Task" [ 905.914034] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.922983] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.980105] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a22edcd-1bc2-46e9-b13f-bc207188a3d5 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "00232eca-da03-49ea-b62b-d9721739b0ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 79.133s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.004743] env[70020]: DEBUG nova.scheduler.client.report [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.266230] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 36f15b0a-d57f-49d8-9510-1036e889a438] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.400759] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618448, 'name': PowerOnVM_Task, 'duration_secs': 1.214329} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.401115] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.401290] env[70020]: INFO nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Took 10.12 seconds to spawn the instance on the hypervisor. 
[ 906.401462] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.402281] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eef5497-1a22-4b3d-9682-f5e5167070a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.428393] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.482359] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 906.510597] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.799s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.511193] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.513995] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.498s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.515815] env[70020]: INFO nova.compute.claims [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.761615] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "00232eca-da03-49ea-b62b-d9721739b0ec" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.761903] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "00232eca-da03-49ea-b62b-d9721739b0ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.762139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.762327] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.762495] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "00232eca-da03-49ea-b62b-d9721739b0ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.764955] env[70020]: INFO nova.compute.manager [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Terminating instance [ 906.770454] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 48efbd17-ff4e-426a-a135-f43cae8c97d0] Instance has had 0 of 5 cleanup attempts {{(pid=70020) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.922998] env[70020]: INFO nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Took 56.73 seconds to build instance. [ 906.928388] env[70020]: DEBUG oslo_vmware.api [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Task: {'id': task-3618451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.589634} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.928632] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.928809] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.928980] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.929171] env[70020]: INFO nova.compute.manager [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Took 1.64 seconds to destroy the instance on the hypervisor. [ 906.929409] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 906.930166] env[70020]: DEBUG nova.compute.manager [-] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.930166] env[70020]: DEBUG nova.network.neutron [-] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.006912] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.020636] env[70020]: DEBUG nova.compute.utils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 907.024219] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 907.024461] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.074346] env[70020]: DEBUG nova.policy [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9be1a256e3b49f7a93dad4d718d7deb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19128323d60a4992b0a2f837317d3f04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 907.230539] env[70020]: DEBUG nova.compute.manager [req-fe292be1-ec95-45fb-8178-7f8789aaef72 req-2a530834-6bd5-4759-b773-70a554e4bcf0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Received event network-vif-deleted-759215b2-ed99-4281-9bf0-fb9379eab835 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.230873] env[70020]: INFO nova.compute.manager [req-fe292be1-ec95-45fb-8178-7f8789aaef72 req-2a530834-6bd5-4759-b773-70a554e4bcf0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Neutron deleted interface 759215b2-ed99-4281-9bf0-fb9379eab835; detaching it from the instance and deleting it from the info cache [ 907.230950] env[70020]: DEBUG nova.network.neutron [req-fe292be1-ec95-45fb-8178-7f8789aaef72 req-2a530834-6bd5-4759-b773-70a554e4bcf0 
service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.269347] env[70020]: DEBUG nova.compute.manager [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.269604] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.270712] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d723e52-3915-4c46-beca-0c4fcdf09791 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.274877] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c56279e2-0fc6-4546-854c-82e5fda0e7a7] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.281869] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.282438] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26bdb056-31ea-46c7-aa6e-df43207ccb08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.290145] env[70020]: DEBUG oslo_vmware.api [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 907.290145] env[70020]: value = "task-3618452" [ 907.290145] env[70020]: _type = "Task" [ 907.290145] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.299757] env[70020]: DEBUG oslo_vmware.api [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618452, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.425008] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "61875dcc-5b76-409b-987f-4ae875909257" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 71.968s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.488711] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Successfully created port: 4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.525155] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.687302] env[70020]: DEBUG nova.network.neutron [-] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.734381] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30550483-402e-43e4-93ff-1f7854d363f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.745732] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb4e6ac-2128-48c1-bcfa-73acb8abc34a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.777707] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 55c20886-ae10-4326-a9de-f8577f320a99] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.797445] env[70020]: DEBUG nova.compute.manager [req-fe292be1-ec95-45fb-8178-7f8789aaef72 req-2a530834-6bd5-4759-b773-70a554e4bcf0 service nova] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Detach interface failed, port_id=759215b2-ed99-4281-9bf0-fb9379eab835, reason: Instance 422ca332-5952-443c-a22e-67b1b45df5b9 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 907.807883] env[70020]: DEBUG oslo_vmware.api [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618452, 'name': PowerOffVM_Task, 'duration_secs': 0.266134} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.808214] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.808393] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.808619] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9c7f150-0e4f-42c5-92b3-479632556ea8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.929462] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 907.936023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.936265] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.936474] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Deleting the datastore file [datastore1] 00232eca-da03-49ea-b62b-d9721739b0ec {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.937237] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da9eed8a-b95b-4bb1-a41f-f00641616f44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.945297] env[70020]: DEBUG oslo_vmware.api [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for the task: (returnval){ [ 907.945297] env[70020]: value = "task-3618454" [ 907.945297] env[70020]: _type = "Task" [ 907.945297] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.957338] env[70020]: DEBUG oslo_vmware.api [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618454, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.190545] env[70020]: INFO nova.compute.manager [-] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Took 1.26 seconds to deallocate network for instance. [ 908.214721] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009dea0c-47f7-4e59-a4d6-38caa605adb4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.229026] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82afef51-416f-4bec-9a48-3657147f3404 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.264479] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa3f191-40c9-4004-956d-6763dc11e85e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.276733] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e01165-2036-4f54-896c-fbf1c99646e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.294880] env[70020]: DEBUG nova.compute.provider_tree [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.298663] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c08166c5-2c31-4d40-a61c-c541924eb49c] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.458764] env[70020]: DEBUG oslo_vmware.api [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Task: {'id': task-3618454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24332} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.459820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.460121] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.460339] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.460538] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.460722] env[70020]: INFO nova.compute.manager [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Took 1.19 seconds to destroy the instance on the hypervisor. [ 908.460969] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.461196] env[70020]: DEBUG nova.compute.manager [-] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.461294] env[70020]: DEBUG nova.network.neutron [-] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.542868] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.569252] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.569516] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.569668] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.569845] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.569987] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 908.570157] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.570362] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 908.570515] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.570678] env[70020]: DEBUG nova.virt.hardware [None 
req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.570836] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.571011] env[70020]: DEBUG nova.virt.hardware [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.572382] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642ff029-c4d9-4342-8499-07dcc6ab5a8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.582036] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66276a9e-c0b0-4f5c-aba2-06b28fcf4fa8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.699125] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.798861] env[70020]: DEBUG nova.scheduler.client.report [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.803179] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 0add6226-3b90-4991-8f2b-81c35e72a7df] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.303846] env[70020]: DEBUG nova.compute.manager [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received event network-vif-deleted-926fd7e2-c562-49a2-9146-1c38fc20e80a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.304169] env[70020]: INFO nova.compute.manager [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Neutron deleted interface 
926fd7e2-c562-49a2-9146-1c38fc20e80a; detaching it from the instance and deleting it from the info cache [ 909.304403] env[70020]: DEBUG nova.network.neutron [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updating instance_info_cache with network_info: [{"id": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "address": "fa:16:3e:0e:d5:b6", "network": {"id": "20e57d2a-47de-4fcb-a6e9-5a7f29bb8ffe", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1648505199", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5ae2c1c42704f49854f86cca4f8a95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb41c227c-d5", "ovs_interfaceid": "b41c227c-d57d-4d37-a05c-24351d6d22f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.307275] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.793s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.307591] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 909.311479] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ae91adc5-b3a4-4503-91f2-d803eaefedc5] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.313489] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.319s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.313688] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.315926] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.853s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.317424] env[70020]: INFO nova.compute.claims [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.350714] env[70020]: INFO nova.scheduler.client.report [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleted allocations for instance f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1 [ 909.423264] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52407c68-a959-fc5d-fcb4-a1640e206c72/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 909.424524] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38538258-5953-4fb8-a537-95e75da75289 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.432241] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52407c68-a959-fc5d-fcb4-a1640e206c72/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 909.432241] env[70020]: ERROR oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52407c68-a959-fc5d-fcb4-a1640e206c72/disk-0.vmdk due to incomplete transfer. [ 909.432241] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2c432847-45e2-4c93-a56d-8db57bf060cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.440359] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52407c68-a959-fc5d-fcb4-a1640e206c72/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 909.440359] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Uploaded image e6dff619-7e86-49bd-a945-dfebd1e75264 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 909.442634] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 909.443414] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e48c52b0-e057-4d9e-a03b-14972b7b4ffb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.451665] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 909.451665] env[70020]: value = "task-3618455" [ 909.451665] env[70020]: _type = "Task" [ 909.451665] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.461321] env[70020]: DEBUG nova.network.neutron [-] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.461321] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618455, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.544675] env[70020]: DEBUG nova.compute.manager [req-ddbfff21-ca14-4367-a9da-29dd2e9054d5 req-88e1ef7e-8832-4e1f-98d4-55677118f8ee service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Received event network-vif-plugged-4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.544884] env[70020]: DEBUG oslo_concurrency.lockutils [req-ddbfff21-ca14-4367-a9da-29dd2e9054d5 req-88e1ef7e-8832-4e1f-98d4-55677118f8ee service nova] Acquiring lock "d65ab5e0-189c-43e1-accf-16248ad02852-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.545242] env[70020]: DEBUG oslo_concurrency.lockutils [req-ddbfff21-ca14-4367-a9da-29dd2e9054d5 req-88e1ef7e-8832-4e1f-98d4-55677118f8ee service nova] Lock "d65ab5e0-189c-43e1-accf-16248ad02852-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.545462] env[70020]: DEBUG oslo_concurrency.lockutils [req-ddbfff21-ca14-4367-a9da-29dd2e9054d5 req-88e1ef7e-8832-4e1f-98d4-55677118f8ee service nova] Lock "d65ab5e0-189c-43e1-accf-16248ad02852-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.546194] env[70020]: DEBUG nova.compute.manager [req-ddbfff21-ca14-4367-a9da-29dd2e9054d5 req-88e1ef7e-8832-4e1f-98d4-55677118f8ee service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] No waiting events found dispatching network-vif-plugged-4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 909.546194] env[70020]: WARNING nova.compute.manager [req-ddbfff21-ca14-4367-a9da-29dd2e9054d5 req-88e1ef7e-8832-4e1f-98d4-55677118f8ee service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Received unexpected event network-vif-plugged-4896df9a-0702-4071-8432-b95ec01f1d13 for instance with vm_state building and task_state spawning. [ 909.569796] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Successfully updated port: 4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 909.815639] env[70020]: DEBUG nova.compute.utils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.817786] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 909.818082] env[70020]: DEBUG nova.network.neutron [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.820949] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ef13bf5-3605-4e0b-89cc-931fa1e4c42b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.824581] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 738d52c6-0368-434f-a14f-05b47ca865e3] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.839953] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb9515c-54ad-44cc-ab3b-616601c17bd1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.860606] env[70020]: DEBUG oslo_concurrency.lockutils [None req-55c54865-9714-426c-acf5-9dd59b5b911f tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.943s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.898949] env[70020]: DEBUG nova.compute.manager [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Detach interface failed, port_id=926fd7e2-c562-49a2-9146-1c38fc20e80a, reason: Instance 00232eca-da03-49ea-b62b-d9721739b0ec could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 909.899316] env[70020]: DEBUG nova.compute.manager [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Received event network-vif-deleted-b41c227c-d57d-4d37-a05c-24351d6d22f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.899627] env[70020]: INFO nova.compute.manager [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Neutron deleted interface b41c227c-d57d-4d37-a05c-24351d6d22f1; detaching it from the instance and deleting it from the info cache [ 909.899923] env[70020]: DEBUG nova.network.neutron [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.903277] env[70020]: DEBUG nova.policy [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b67375d5e85b4ba99d47120945bbf0f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd193f3ca7403a986d72f072590f4f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.963499] env[70020]: INFO nova.compute.manager [-] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Took 1.50 seconds to deallocate network for instance. [ 909.963499] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618455, 'name': Destroy_Task, 'duration_secs': 0.417548} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.965029] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Destroyed the VM [ 909.965269] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 909.968389] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-da53e092-7131-4fa4-8a60-ffe53a22d7eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.976627] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 909.976627] env[70020]: value = "task-3618456" [ 909.976627] env[70020]: _type = "Task" [ 909.976627] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.985526] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618456, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.072866] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "refresh_cache-d65ab5e0-189c-43e1-accf-16248ad02852" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.073155] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "refresh_cache-d65ab5e0-189c-43e1-accf-16248ad02852" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.073443] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.253677] env[70020]: DEBUG nova.network.neutron [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Successfully created port: 5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.318994] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] 
[instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 910.330622] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 8f7e4e69-0796-469f-8a2b-4e19fbf15ed3] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.407653] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fb34c85-8967-43fb-aecc-52ed55bc42a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.426509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bc64de-2dd1-4890-a5d2-7371d73d0139 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.466231] env[70020]: DEBUG nova.compute.manager [req-f631e302-22a7-4540-8865-732085752f9d req-3d62df7b-693a-4817-a3de-74b1763e4fee service nova] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Detach interface failed, port_id=b41c227c-d57d-4d37-a05c-24351d6d22f1, reason: Instance 00232eca-da03-49ea-b62b-d9721739b0ec could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 910.471671] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.489228] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618456, 'name': RemoveSnapshot_Task} progress is 54%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.609997] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.766066] env[70020]: DEBUG nova.network.neutron [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Updating instance_info_cache with network_info: [{"id": "4896df9a-0702-4071-8432-b95ec01f1d13", "address": "fa:16:3e:83:90:c8", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4896df9a-07", "ovs_interfaceid": "4896df9a-0702-4071-8432-b95ec01f1d13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.837165] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 61bea079-9731-48d1-b472-b30226a0b5a1] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.987349] env[70020]: DEBUG oslo_vmware.api [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618456, 'name': RemoveSnapshot_Task, 'duration_secs': 0.701162} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.988468] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 910.988689] env[70020]: INFO nova.compute.manager [None req-2bb5f6cc-44d3-47fa-8675-c69478868134 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Took 15.99 seconds to snapshot the instance on the hypervisor. 
[ 910.991485] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df1800c-5263-4459-aac2-5f8aa5459399 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.000163] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e943dd2d-0877-427b-a010-f44320047f76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.033516] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8a781b-4c8a-4d25-9bf4-c62f064b9a0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.043147] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d437d6-9413-43f1-b0a2-d714b9a63907 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.057760] env[70020]: DEBUG nova.compute.provider_tree [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.271875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "refresh_cache-d65ab5e0-189c-43e1-accf-16248ad02852" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.272236] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Instance network_info: |[{"id": "4896df9a-0702-4071-8432-b95ec01f1d13", "address": "fa:16:3e:83:90:c8", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4896df9a-07", "ovs_interfaceid": "4896df9a-0702-4071-8432-b95ec01f1d13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 911.272662] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 
tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:90:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4896df9a-0702-4071-8432-b95ec01f1d13', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.280502] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.280735] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.280953] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b53688a-0eca-4e79-82cb-2d66913c698a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.307457] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.307457] env[70020]: value = "task-3618457" [ 911.307457] env[70020]: _type = "Task" [ 911.307457] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.318092] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618457, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.329136] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 911.339728] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: b14c4ac4-7f1e-4b17-93a1-38eae3c77c0e] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.358301] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='44df97868a0f5096e01bec1a0af6bd06',container_format='bare',created_at=2025-04-25T23:04:40Z,direct_url=,disk_format='vmdk',id=038d3b5b-38fb-498f-b4cc-5ed167e098c3,min_disk=1,min_ram=0,name='tempest-test-snap-794634412',owner='52cd193f3ca7403a986d72f072590f4f',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-04-25T23:04:57Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.358552] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.358704] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.358883] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.359042] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.359203] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.359406] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.359560] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.359722] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.359879] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.360064] env[70020]: DEBUG nova.virt.hardware [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.361107] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b8f317-1eaa-4550-ae09-b1c78c9f00f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.370443] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6bb6a5-7e0e-4f8b-96d9-b12e36c2de02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.560734] env[70020]: DEBUG nova.scheduler.client.report [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.572720] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "ef0d716a-080e-4167-bd34-b2c660b95c88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.573090] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.573372] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "ef0d716a-080e-4167-bd34-b2c660b95c88-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.573596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.573796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.576728] env[70020]: INFO nova.compute.manager [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Terminating instance [ 911.695521] env[70020]: DEBUG nova.compute.manager [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Received event network-changed-4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.695726] env[70020]: DEBUG nova.compute.manager [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Refreshing instance network info cache due to event network-changed-4896df9a-0702-4071-8432-b95ec01f1d13. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 911.695934] env[70020]: DEBUG oslo_concurrency.lockutils [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] Acquiring lock "refresh_cache-d65ab5e0-189c-43e1-accf-16248ad02852" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.696102] env[70020]: DEBUG oslo_concurrency.lockutils [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] Acquired lock "refresh_cache-d65ab5e0-189c-43e1-accf-16248ad02852" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.696266] env[70020]: DEBUG nova.network.neutron [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Refreshing network info cache for port 4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.803525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.804488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.804488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.804488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.804488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.806538] env[70020]: INFO nova.compute.manager [None 
req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Terminating instance [ 911.818244] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618457, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.843616] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 6c36df58-3ab3-4595-b89c-9ab5a4664eec] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.066139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.066765] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 912.070534] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.217s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.070739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.072790] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.595s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.073018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.074741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.781s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.074925] env[70020]: DEBUG nova.objects.instance [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 912.084296] env[70020]: DEBUG nova.compute.manager [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 912.084599] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.086420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170b7746-0279-47ab-98f8-b7a1a35c03d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.097255] env[70020]: DEBUG nova.network.neutron [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Successfully updated port: 5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.099376] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.099859] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ce0b13f-1eac-4be1-8513-fc921167467e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.108513] env[70020]: DEBUG oslo_vmware.api [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 912.108513] env[70020]: value = "task-3618458" [ 912.108513] env[70020]: _type = "Task" [ 912.108513] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.113674] env[70020]: INFO nova.scheduler.client.report [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Deleted allocations for instance f16d60a4-5f80-4f41-b994-068de48775ad [ 912.115720] env[70020]: INFO nova.scheduler.client.report [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted allocations for instance 0caa6acd-29d4-43ee-8b32-5149462dfc1c [ 912.315451] env[70020]: DEBUG nova.compute.manager [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 912.315758] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.316621] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4d8875-abd8-46d2-9ba4-987c50253e1e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.325219] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.328966] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbf060c1-0fab-47d2-af0e-75febbdcbedf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.330581] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618457, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.336915] env[70020]: DEBUG oslo_vmware.api [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 912.336915] env[70020]: value = "task-3618459" [ 912.336915] env[70020]: _type = "Task" [ 912.336915] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.345504] env[70020]: DEBUG oslo_vmware.api [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618459, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.348871] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 19036f6f-2ee3-4ea5-82fa-b510bf903922] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.480025] env[70020]: DEBUG nova.network.neutron [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Updated VIF entry in instance network info cache for port 4896df9a-0702-4071-8432-b95ec01f1d13. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.480480] env[70020]: DEBUG nova.network.neutron [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Updating instance_info_cache with network_info: [{"id": "4896df9a-0702-4071-8432-b95ec01f1d13", "address": "fa:16:3e:83:90:c8", "network": {"id": "43f80db7-dce1-4f89-90ff-8ba5981812e4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-604035824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19128323d60a4992b0a2f837317d3f04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4896df9a-07", "ovs_interfaceid": "4896df9a-0702-4071-8432-b95ec01f1d13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.572502] env[70020]: DEBUG nova.compute.utils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.574511] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 912.574725] env[70020]: DEBUG nova.network.neutron [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.600722] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "refresh_cache-8317f386-44d0-4b1b-8590-d0336fafac21" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.600892] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "refresh_cache-8317f386-44d0-4b1b-8590-d0336fafac21" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.601039] env[70020]: DEBUG nova.network.neutron [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.615972] env[70020]: DEBUG nova.policy [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ebabdad8aa843f28165fcd167382c60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfa7d3b1f5a14c60b19cde5030c2f0a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.626029] env[70020]: DEBUG oslo_vmware.api [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618458, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.626950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5e724a73-9268-4e66-98ca-3ffb35e160ae tempest-ServersWithSpecificFlavorTestJSON-1827464871 tempest-ServersWithSpecificFlavorTestJSON-1827464871-project-member] Lock "f16d60a4-5f80-4f41-b994-068de48775ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.434s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.631619] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73fbd504-e53f-43f1-9171-7f60db02f2ed tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "0caa6acd-29d4-43ee-8b32-5149462dfc1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.667s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.706138] env[70020]: DEBUG nova.compute.manager [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.707351] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b78a0c5-ddce-4443-9c1a-0d6cf8180777 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.821327] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618457, 'name': CreateVM_Task, 'duration_secs': 1.434554} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.821525] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.822276] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.822511] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.823108] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.823201] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbfcf193-2cd2-4128-a581-54294612aee9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.828860] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 912.828860] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521db050-556f-a132-72f2-9db5f1b4b5eb" [ 912.828860] env[70020]: _type = "Task" [ 912.828860] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.841816] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521db050-556f-a132-72f2-9db5f1b4b5eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.851614] env[70020]: DEBUG oslo_vmware.api [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618459, 'name': PowerOffVM_Task, 'duration_secs': 0.247723} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.852038] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: a09db142-60d1-4a62-8e76-1e2e3676124f] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.854684] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 912.854684] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 912.854684] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0d4df1f-d384-47a0-8e68-97ae7fdcd62d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.925803] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 912.926059] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 912.926262] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleting the datastore file [datastore2] 8bff6907-c2b0-4ad1-9298-b2d622d33fde {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.926541] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-232eb756-fbc5-41eb-be46-446b4a6edf5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.935943] env[70020]: DEBUG oslo_vmware.api [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 912.935943] env[70020]: value = "task-3618461" [ 912.935943] env[70020]: _type = "Task" [ 912.935943] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.945425] env[70020]: DEBUG oslo_vmware.api [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618461, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.983732] env[70020]: DEBUG oslo_concurrency.lockutils [req-26757c45-3803-41bf-b019-275900831492 req-3881a8a4-3609-4c60-bd31-9924a5efc5ec service nova] Releasing lock "refresh_cache-d65ab5e0-189c-43e1-accf-16248ad02852" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.078608] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 913.091690] env[70020]: DEBUG oslo_concurrency.lockutils [None req-97d0d711-bfc3-41d8-8af1-5238df056bb4 tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.095637] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.981s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.095637] env[70020]: INFO nova.compute.claims [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.120126] env[70020]: DEBUG oslo_vmware.api [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618458, 'name': PowerOffVM_Task, 'duration_secs': 0.725008} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.120954] env[70020]: DEBUG nova.network.neutron [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Successfully created port: 40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.123474] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.123640] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.126310] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f18fde7-a043-4d4e-85b3-8f60ff0fba54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.141917] env[70020]: DEBUG nova.network.neutron [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.197791] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.198294] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.198535] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleting the datastore file [datastore2] ef0d716a-080e-4167-bd34-b2c660b95c88 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.198805] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a1542a1-d1d9-40ff-93fc-b7c357d064de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.206310] env[70020]: DEBUG oslo_vmware.api [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for the task: (returnval){ [ 913.206310] env[70020]: value = "task-3618463" [ 
913.206310] env[70020]: _type = "Task" [ 913.206310] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.214728] env[70020]: DEBUG oslo_vmware.api [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.219149] env[70020]: INFO nova.compute.manager [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] instance snapshotting [ 913.221851] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7396e0bf-2c5a-4a80-9d64-46128da5a200 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.243226] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1cbcf6-c447-4773-a3d8-8dbbe469d691 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.316927] env[70020]: DEBUG nova.network.neutron [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Updating instance_info_cache with network_info: [{"id": "5b0a839b-040e-424a-b8ad-91a46034cde9", "address": "fa:16:3e:0b:80:03", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b0a839b-04", "ovs_interfaceid": "5b0a839b-040e-424a-b8ad-91a46034cde9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.340969] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521db050-556f-a132-72f2-9db5f1b4b5eb, 'name': SearchDatastore_Task, 'duration_secs': 0.011992} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.341221] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.341267] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.341540] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.341726] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.341911] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 913.342211] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fd52667-9192-4652-aa2c-48c70bc9ce2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.351748] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 913.351928] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 913.352698] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61eb419d-9f15-439a-9f40-fb90845d6d06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.355227] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 1c45f085-0eb5-4edc-b5aa-9d5c53aca1e0] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.360574] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 913.360574] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529d37cb-a487-6f01-7454-d45f66310a76" [ 913.360574] env[70020]: _type = "Task" [ 913.360574] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.371410] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d37cb-a487-6f01-7454-d45f66310a76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.448877] env[70020]: DEBUG oslo_vmware.api [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159998} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.449294] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.449559] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.449800] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.450042] env[70020]: INFO nova.compute.manager [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 913.450349] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.450600] env[70020]: DEBUG nova.compute.manager [-] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 913.450746] env[70020]: DEBUG nova.network.neutron [-] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.719384] env[70020]: DEBUG oslo_vmware.api [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Task: {'id': task-3618463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356967} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.719673] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.719848] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.720125] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.720357] env[70020]: INFO nova.compute.manager [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Took 1.64 seconds to destroy the instance on the hypervisor. [ 913.720666] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.721224] env[70020]: DEBUG nova.compute.manager [-] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 913.721224] env[70020]: DEBUG nova.network.neutron [-] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.755838] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 913.756172] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f8348343-65bf-4ba3-9f46-4197373e5a69 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.771343] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 913.771343] env[70020]: value = "task-3618464" [ 913.771343] env[70020]: _type = "Task" [ 913.771343] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.782155] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618464, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.817595] env[70020]: DEBUG nova.compute.manager [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Received event network-vif-plugged-5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.817810] env[70020]: DEBUG oslo_concurrency.lockutils [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] Acquiring lock "8317f386-44d0-4b1b-8590-d0336fafac21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.818046] env[70020]: DEBUG oslo_concurrency.lockutils [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] Lock "8317f386-44d0-4b1b-8590-d0336fafac21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.818246] env[70020]: DEBUG oslo_concurrency.lockutils [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] Lock "8317f386-44d0-4b1b-8590-d0336fafac21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.818414] env[70020]: DEBUG nova.compute.manager [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] No waiting events found dispatching network-vif-plugged-5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 913.818934] env[70020]: WARNING nova.compute.manager [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Received unexpected event network-vif-plugged-5b0a839b-040e-424a-b8ad-91a46034cde9 for instance with vm_state building and task_state spawning. [ 913.818934] env[70020]: DEBUG nova.compute.manager [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Received event network-changed-5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.819124] env[70020]: DEBUG nova.compute.manager [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Refreshing instance network info cache due to event network-changed-5b0a839b-040e-424a-b8ad-91a46034cde9. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 913.819182] env[70020]: DEBUG oslo_concurrency.lockutils [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] Acquiring lock "refresh_cache-8317f386-44d0-4b1b-8590-d0336fafac21" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.820301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "refresh_cache-8317f386-44d0-4b1b-8590-d0336fafac21" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.820612] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Instance network_info: |[{"id": "5b0a839b-040e-424a-b8ad-91a46034cde9", "address": "fa:16:3e:0b:80:03", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b0a839b-04", "ovs_interfaceid": "5b0a839b-040e-424a-b8ad-91a46034cde9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.820966] env[70020]: DEBUG oslo_concurrency.lockutils [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] Acquired lock "refresh_cache-8317f386-44d0-4b1b-8590-d0336fafac21" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.821197] env[70020]: DEBUG nova.network.neutron [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Refreshing network info cache for port 5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.822625] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:80:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b0a839b-040e-424a-b8ad-91a46034cde9', 'vif_model': 'vmxnet3'}] {{(pid=70020) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.830275] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.834198] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.835179] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9372a989-6aaa-49e7-96ec-57f7cff86c4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.854985] env[70020]: DEBUG nova.compute.manager [req-45b396e5-083d-432e-9a78-4a25b62d4c4f req-79fb890a-ddc1-4998-bd47-a0506331ce74 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Received event network-vif-deleted-36d75dbd-aa9f-46d8-ad64-f95577fdb5f6 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.855237] env[70020]: INFO nova.compute.manager [req-45b396e5-083d-432e-9a78-4a25b62d4c4f req-79fb890a-ddc1-4998-bd47-a0506331ce74 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Neutron deleted interface 36d75dbd-aa9f-46d8-ad64-f95577fdb5f6; detaching it from the instance and deleting it from the info cache [ 913.855444] env[70020]: DEBUG nova.network.neutron [req-45b396e5-083d-432e-9a78-4a25b62d4c4f req-79fb890a-ddc1-4998-bd47-a0506331ce74 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.858407] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f53cb08c-0939-4cb1-8476-8b289d6a1b05] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.868213] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.868213] env[70020]: value = "task-3618465" [ 913.868213] env[70020]: _type = "Task" [ 913.868213] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.878035] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d37cb-a487-6f01-7454-d45f66310a76, 'name': SearchDatastore_Task, 'duration_secs': 0.013115} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.880040] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b834fc-7204-4bc4-8db8-f3b7ee26acfd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.885180] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "bc57657e-99e8-46b8-9731-ddd4864a3114" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.885371] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.885600] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "bc57657e-99e8-46b8-9731-ddd4864a3114-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.885762] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.885927] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.887568] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618465, 'name': CreateVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.888493] env[70020]: INFO nova.compute.manager [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Terminating instance [ 913.892886] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 913.892886] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522946f7-3d64-d263-7a6a-8f2ec8d10ba5" [ 913.892886] env[70020]: _type = "Task" [ 913.892886] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.903481] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522946f7-3d64-d263-7a6a-8f2ec8d10ba5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.090961] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 914.135520] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 914.135881] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.136081] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 914.136294] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 
tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.136476] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 914.136987] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 914.136987] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 914.137109] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 914.137317] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 914.137425] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 914.137617] env[70020]: DEBUG nova.virt.hardware [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 914.138604] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75b974c-21e2-4dbe-93cc-9dbb3566dd67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.151599] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bee48f9-99f3-4f61-afb8-56ed3398777a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.162640] env[70020]: DEBUG nova.network.neutron [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Updated VIF entry in instance network info cache for port 5b0a839b-040e-424a-b8ad-91a46034cde9. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.163091] env[70020]: DEBUG nova.network.neutron [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Updating instance_info_cache with network_info: [{"id": "5b0a839b-040e-424a-b8ad-91a46034cde9", "address": "fa:16:3e:0b:80:03", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b0a839b-04", "ovs_interfaceid": "5b0a839b-040e-424a-b8ad-91a46034cde9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.213975] env[70020]: DEBUG nova.network.neutron [-] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.281608] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618464, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.360288] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81f7f420-2d21-4b8b-b4dc-2b7c918bd550 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.368510] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c4335d00-29a3-4f2e-b826-1a78ef02e0bf] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.376290] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7615137-042e-4789-a4bf-e62549f93af1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.395076] env[70020]: DEBUG nova.compute.manager [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.395311] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.396756] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdb5ef9-869a-4ed5-bd1d-b3e8456ac7f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.411277] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618465, 'name': CreateVM_Task, 'duration_secs': 0.433867} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.428039] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.429350] env[70020]: DEBUG nova.compute.manager [req-45b396e5-083d-432e-9a78-4a25b62d4c4f req-79fb890a-ddc1-4998-bd47-a0506331ce74 service nova] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Detach interface failed, port_id=36d75dbd-aa9f-46d8-ad64-f95577fdb5f6, reason: Instance 8bff6907-c2b0-4ad1-9298-b2d622d33fde could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 914.432196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.432360] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.432796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.438328] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebb407bc-a014-4ed1-8c1a-0eba75060429 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.440028] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522946f7-3d64-d263-7a6a-8f2ec8d10ba5, 'name': SearchDatastore_Task, 'duration_secs': 0.012035} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.440586] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.440858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.441175] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d65ab5e0-189c-43e1-accf-16248ad02852/d65ab5e0-189c-43e1-accf-16248ad02852.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 914.441399] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9fe115d-b3ee-4b60-8e45-1c1d86267bb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.443348] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ed3ad77-a18f-43bf-a8f0-456e1aec668e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.447117] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 914.447117] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c55208-1d42-3a42-cbee-b8001a7b9868" [ 914.447117] env[70020]: _type = "Task" [ 914.447117] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.455447] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 914.455447] env[70020]: value = "task-3618467" [ 914.455447] env[70020]: _type = "Task" [ 914.455447] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.456637] env[70020]: DEBUG oslo_vmware.api [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 914.456637] env[70020]: value = "task-3618466" [ 914.456637] env[70020]: _type = "Task" [ 914.456637] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.468137] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c55208-1d42-3a42-cbee-b8001a7b9868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.477854] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618467, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.481655] env[70020]: DEBUG oslo_vmware.api [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618466, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.597350] env[70020]: DEBUG nova.network.neutron [-] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.668232] env[70020]: DEBUG oslo_concurrency.lockutils [req-218535ba-bfe2-415e-8d82-f0e5718bf5f9 req-84d494b3-c11a-4063-8437-481a65537b44 service nova] Releasing lock "refresh_cache-8317f386-44d0-4b1b-8590-d0336fafac21" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.717112] env[70020]: INFO nova.compute.manager [-] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Took 1.27 seconds to deallocate network for instance. [ 914.730324] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3d1af4-6a8f-4297-a6dc-fbf7a64da819 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.744402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69a168f-77e8-474b-8643-b9a2d95ab294 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.782479] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc27679-019d-43ac-80d4-fbb8f94aaa9e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.792218] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618464, 'name': CreateSnapshot_Task, 'duration_secs': 0.825054} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.794341] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 914.795330] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4795217-7276-4dfa-aee8-fd1f04032f3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.798815] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edb4185-820e-46b2-9d6e-869705869597 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.815812] env[70020]: DEBUG nova.compute.provider_tree [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.890286] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 7cf7f0a9-8240-4e78-b5d4-b1eb1da60764] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.969603] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.970039] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Processing image 038d3b5b-38fb-498f-b4cc-5ed167e098c3 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.970165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.970371] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.970586] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.970888] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2244593e-e208-4de2-ae14-8a16d73c3909 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.976897] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618467, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.981668] env[70020]: DEBUG oslo_vmware.api [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618466, 'name': PowerOffVM_Task, 'duration_secs': 0.21765} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.981935] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.982146] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.983193] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8d82f70-1ef3-4eee-a1fd-f83f26305f7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.984883] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.985172] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.986478] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b38ff08-e1c7-4560-bc7c-361933663bed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.992698] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 914.992698] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522bea7f-4fb8-c24c-cc1a-6c36eba66e12" [ 914.992698] env[70020]: _type = "Task" [ 914.992698] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.006199] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522bea7f-4fb8-c24c-cc1a-6c36eba66e12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.054473] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.054678] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.054856] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleting the datastore file [datastore2] bc57657e-99e8-46b8-9731-ddd4864a3114 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.055146] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b8c8f29-e24f-44b8-ac31-5dccb6944895 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.063375] env[70020]: DEBUG oslo_vmware.api [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 915.063375] env[70020]: value = "task-3618469" [ 915.063375] env[70020]: _type = "Task" [ 915.063375] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.074572] env[70020]: DEBUG oslo_vmware.api [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618469, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.100355] env[70020]: INFO nova.compute.manager [-] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Took 1.38 seconds to deallocate network for instance. 
[ 915.239519] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.326785] env[70020]: DEBUG nova.scheduler.client.report [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.339078] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 915.339788] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5ad8257a-abc0-4c7c-ae26-c10ab22764cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.351314] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 915.351314] env[70020]: value = "task-3618470" [ 915.351314] env[70020]: _type = "Task" [ 915.351314] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.361868] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618470, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.393478] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 50ce7a0c-aa80-4816-b84e-d8ff7b10fffb] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.469771] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618467, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543991} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.470267] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] d65ab5e0-189c-43e1-accf-16248ad02852/d65ab5e0-189c-43e1-accf-16248ad02852.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 915.471155] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.471512] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8236e206-ebc5-48c4-bdb9-c4c4ead657f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.475696] env[70020]: DEBUG nova.network.neutron [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Successfully updated port: 40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.483034] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 915.483034] env[70020]: value = "task-3618471" [ 915.483034] env[70020]: _type = "Task" [ 915.483034] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.492900] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618471, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.507014] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 915.507488] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Fetch image to [datastore2] OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6/OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 915.507814] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Downloading stream optimized image 038d3b5b-38fb-498f-b4cc-5ed167e098c3 to [datastore2] OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6/OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6.vmdk on the data store datastore2 as vApp {{(pid=70020) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 915.508237] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Downloading image file data 038d3b5b-38fb-498f-b4cc-5ed167e098c3 to the ESX as VM named 'OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6' {{(pid=70020) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 915.576242] env[70020]: DEBUG oslo_vmware.api [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153068} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.576647] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.577069] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.577688] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.577688] env[70020]: INFO nova.compute.manager [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Took 1.18 seconds to destroy the instance on the hypervisor. [ 915.578025] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.578273] env[70020]: DEBUG nova.compute.manager [-] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 915.578417] env[70020]: DEBUG nova.network.neutron [-] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.605132] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 915.605132] env[70020]: value = "resgroup-9" [ 915.605132] env[70020]: _type = "ResourcePool" [ 915.605132] env[70020]: }.
{{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 915.605371] env[70020]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d8d06bf5-17b5-4f26-a71b-e568bf7bc58d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.622770] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.634119] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease: (returnval){ [ 915.634119] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520c9cff-ab44-c091-e21b-db7fd43744a4" [ 915.634119] env[70020]: _type = "HttpNfcLease" [ 915.634119] env[70020]: } obtained for vApp import into resource pool (val){ [ 915.634119] env[70020]: value = "resgroup-9" [ 915.634119] env[70020]: _type = "ResourcePool" [ 915.634119] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 915.634474] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the lease: (returnval){ [ 915.634474] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520c9cff-ab44-c091-e21b-db7fd43744a4" [ 915.634474] env[70020]: _type = "HttpNfcLease" [ 915.634474] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 915.642411] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 915.642411] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520c9cff-ab44-c091-e21b-db7fd43744a4" [ 915.642411] env[70020]: _type = "HttpNfcLease" [ 915.642411] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 915.840829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.841244] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 915.844522] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.138s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.848246] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.851052] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.201s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.852227] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.857661] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.190s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.861629] env[70020]: INFO nova.compute.claims [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.880549] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618470, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.889263] env[70020]: INFO nova.scheduler.client.report [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Deleted allocations for instance 24184767-92f7-48b3-bbad-16a596ececde [ 915.891385] env[70020]: INFO nova.scheduler.client.report [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Deleted allocations for instance 16c45b86-317a-4d0c-a402-51c85af37a5b [ 915.900396] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 832a38c8-ed3a-460b-91bd-0138d2f2d03d] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.978607] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.978756] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.979432] env[70020]: DEBUG nova.network.neutron [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.990226] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618471, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118702} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.990533] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.991399] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04241379-2749-4c9e-9edf-381a86b213f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.017707] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] d65ab5e0-189c-43e1-accf-16248ad02852/d65ab5e0-189c-43e1-accf-16248ad02852.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.018368] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1827cfac-78b8-4702-81f2-5866cb521969 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.040713] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 916.040713] env[70020]: value = "task-3618473" [ 916.040713] env[70020]: _type = "Task" [ 916.040713] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.055998] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618473, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.146133] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 916.146133] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520c9cff-ab44-c091-e21b-db7fd43744a4" [ 916.146133] env[70020]: _type = "HttpNfcLease" [ 916.146133] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 916.257225] env[70020]: DEBUG nova.compute.manager [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Received event network-vif-plugged-40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.257225] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] Acquiring lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.257225] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.257749] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.258167] env[70020]: DEBUG nova.compute.manager [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] No waiting events found dispatching network-vif-plugged-40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 916.258757] env[70020]: WARNING nova.compute.manager [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Received unexpected event network-vif-plugged-40bf9877-260b-49fc-85fd-307072a733f1 for instance with vm_state building and task_state spawning. [ 916.259137] env[70020]: DEBUG nova.compute.manager [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Received event network-changed-40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.259463] env[70020]: DEBUG nova.compute.manager [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Refreshing instance network info cache due to event network-changed-40bf9877-260b-49fc-85fd-307072a733f1.
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 916.259899] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] Acquiring lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.279180] env[70020]: DEBUG nova.compute.manager [req-aaf64984-152b-43fd-bee2-9ecd9790c716 req-785d1e06-c0d7-4cde-9c0b-b9892e8f9623 service nova] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Received event network-vif-deleted-e4568496-25b6-4661-bb65-1608ffd75212 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.368223] env[70020]: DEBUG nova.compute.utils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.371474] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.371773] env[70020]: DEBUG nova.network.neutron [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.388769] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618470, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.407706] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 0f89d49e-d26c-4d5d-90d7-6f0bf3d67468] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.409633] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b327bb9d-0787-430c-b40d-be2968095744 tempest-ServersAdminNegativeTestJSON-1634392682 tempest-ServersAdminNegativeTestJSON-1634392682-project-member] Lock "16c45b86-317a-4d0c-a402-51c85af37a5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.272s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.410819] env[70020]: DEBUG oslo_concurrency.lockutils [None req-460a3a54-9746-4733-a697-2f6b1e3e430d tempest-ServerShowV254Test-361790264 tempest-ServerShowV254Test-361790264-project-member] Lock "24184767-92f7-48b3-bbad-16a596ececde" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.003s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.484111] env[70020]: DEBUG nova.policy [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dec9771972184de8926b885067533a6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f14ea4f517a04de69f8bc56a19f2be8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 916.529150] env[70020]: DEBUG nova.network.neutron [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.552216] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618473, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.645635] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 916.645635] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520c9cff-ab44-c091-e21b-db7fd43744a4" [ 916.645635] env[70020]: _type = "HttpNfcLease" [ 916.645635] env[70020]: } is ready.
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 916.645962] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 916.645962] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520c9cff-ab44-c091-e21b-db7fd43744a4" [ 916.645962] env[70020]: _type = "HttpNfcLease" [ 916.645962] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 916.646699] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997fe0cc-b399-49c6-934c-99a83662c56f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.656873] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d2aee9-ac15-f8b1-4d5c-568f98d2af48/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 916.657433] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d2aee9-ac15-f8b1-4d5c-568f98d2af48/disk-0.vmdk. {{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 916.729786] env[70020]: DEBUG nova.network.neutron [-] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.745240] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b69b60dd-bb53-4087-945b-506e31d4ce10 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.815503] env[70020]: DEBUG nova.network.neutron [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updating instance_info_cache with network_info: [{"id": "40bf9877-260b-49fc-85fd-307072a733f1", "address": "fa:16:3e:c1:4b:73", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bf9877-26", "ovs_interfaceid": "40bf9877-260b-49fc-85fd-307072a733f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.872030] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 916.890985] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618470, 'name': CloneVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.914022] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 301b30f6-9909-4fc9-8721-88a314e4edb4] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.064563] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618473, 'name': ReconfigVM_Task, 'duration_secs': 0.52073} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.064846] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Reconfigured VM instance instance-00000045 to attach disk [datastore2] d65ab5e0-189c-43e1-accf-16248ad02852/d65ab5e0-189c-43e1-accf-16248ad02852.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.065581] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f52e8e4-11b7-414c-a8a5-b05f14cf1c8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.080615] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 917.080615] env[70020]: value = "task-3618474" [ 917.080615] env[70020]: _type = "Task" [ 917.080615] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.095619] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618474, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.173161] env[70020]: DEBUG nova.network.neutron [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Successfully created port: 7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.234906] env[70020]: INFO nova.compute.manager [-] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Took 1.66 seconds to deallocate network for instance. [ 917.321175] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.321495] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Instance network_info: |[{"id": "40bf9877-260b-49fc-85fd-307072a733f1", "address": "fa:16:3e:c1:4b:73", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bf9877-26", "ovs_interfaceid": "40bf9877-260b-49fc-85fd-307072a733f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 917.328239] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] Acquired lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.328507] env[70020]: DEBUG nova.network.neutron [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Refreshing network info cache for port 40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.329870] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:c1:4b:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40bf9877-260b-49fc-85fd-307072a733f1', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.337737] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 917.344625] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.345615] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddd7fac4-f8d2-4a65-b496-40a7480d6fb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.376584] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.376584] env[70020]: value = "task-3618475" [ 917.376584] env[70020]: _type = "Task" [ 917.376584] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.394556] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618470, 'name': CloneVM_Task, 'duration_secs': 1.580222} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.396023] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Created linked-clone VM from snapshot [ 917.396885] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae947e4a-b5f0-437c-b1c9-2311048edafb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.405432] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618475, 'name': CreateVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.415883] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Uploading image c5e24af1-d7e3-4c3c-af61-b7b1976b5f88 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 917.422435] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 3501a6fc-f090-4098-8f63-57a97bd61f1b] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.434874] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 917.435285] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d2aee9-ac15-f8b1-4d5c-568f98d2af48/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 917.436729] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b5cc51-f58d-4d08-977d-aa33e625101b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.455329] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d2aee9-ac15-f8b1-4d5c-568f98d2af48/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 917.455329] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d2aee9-ac15-f8b1-4d5c-568f98d2af48/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 917.455329] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-faf288b0-e768-47ad-8f0d-754c1cd2417a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.458533] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 917.458533] env[70020]: value = "vm-721723" [ 917.458533] env[70020]: _type = "VirtualMachine" [ 917.458533] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 917.458533] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d4c0616a-e1e5-40da-9953-d494a289cc75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.470221] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lease: (returnval){ [ 917.470221] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5216d44b-085f-969c-c0f5-44f0d44cabf9" [ 917.470221] env[70020]: _type = "HttpNfcLease" [ 917.470221] env[70020]: } obtained for exporting VM: (result){ [ 917.470221] env[70020]: value = "vm-721723" [ 917.470221] env[70020]: _type = "VirtualMachine" [ 917.470221] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 917.470221] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the lease: (returnval){ [ 917.470221] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5216d44b-085f-969c-c0f5-44f0d44cabf9" [ 917.470221] env[70020]: _type = "HttpNfcLease" [ 917.470221] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 917.485167] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 917.485167] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5216d44b-085f-969c-c0f5-44f0d44cabf9" [ 917.485167] env[70020]: _type = "HttpNfcLease" [ 917.485167] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 917.485999] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 917.485999] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5216d44b-085f-969c-c0f5-44f0d44cabf9" [ 917.485999] env[70020]: _type = "HttpNfcLease" [ 917.485999] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 917.486908] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b8bf28-7600-4ba8-94a7-cb2e0301641f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.498240] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a4b1f2-1352-14ae-70ff-2cd3a1b6e13e/disk-0.vmdk from lease info. 
{{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 917.498670] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a4b1f2-1352-14ae-70ff-2cd3a1b6e13e/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 917.591009] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618474, 'name': Rename_Task, 'duration_secs': 0.179492} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.593937] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.596489] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24c16b90-2f73-4364-b819-e22f05afae6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.598277] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3d5d94ca-c725-4444-bde5-cd3f2d3c0a05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.609629] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 917.609629] env[70020]: value = "task-3618477" [ 917.609629] env[70020]: _type = "Task" [ 917.609629] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.622192] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618477, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.650318] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd35a5d-e84c-48c2-9f16-1d2e24eb6ccc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.663107] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0867508-0bcd-4620-bd9d-6b38bdc8a457 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.698630] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c026a8d-8be7-4f09-b5d0-7b313efe3fe5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.701441] env[70020]: DEBUG oslo_vmware.rw_handles [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d2aee9-ac15-f8b1-4d5c-568f98d2af48/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 917.701612] env[70020]: INFO nova.virt.vmwareapi.images [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Downloaded image file data 038d3b5b-38fb-498f-b4cc-5ed167e098c3 [ 917.702345] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c83f996-57a9-4f06-8737-689393d23ac2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.725257] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927ee003-3821-4e70-bfad-345d55084210 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.729724] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43937888-2ba1-4c61-b83f-f3aaeaa4d052 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.754235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.754235] env[70020]: DEBUG nova.compute.provider_tree [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.767137] env[70020]: INFO nova.virt.vmwareapi.images [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] The imported VM was unregistered [ 917.771244] env[70020]: DEBUG 
nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 917.771244] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating directory with path [datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.771244] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5f1bfa7-afef-4d11-8d20-5febbfa8fd8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.787022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created directory with path [datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.787022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6/OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6.vmdk to [datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk. {{(pid=70020) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 917.787022] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-87cde434-1b37-4410-ad49-11f080420caa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.797073] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 917.797073] env[70020]: value = "task-3618479" [ 917.797073] env[70020]: _type = "Task" [ 917.797073] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.810091] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.895951] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 917.897659] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618475, 'name': CreateVM_Task, 'duration_secs': 0.466245} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.902195] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.902195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.902195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.902195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.902195] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-560ee15e-9fb8-442a-8ac8-737c6d3569b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.912140] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 917.912140] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529bfeff-20fa-df67-4290-d145743a72c9" [ 917.912140] env[70020]: _type = "Task" [ 917.912140] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.928622] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d885ddf7-6ec6-46b8-8fd1-7ca7e6a37456] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.930626] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529bfeff-20fa-df67-4290-d145743a72c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012552} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.933094] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 917.933458] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.933618] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 917.933805] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.933948] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 917.934185] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 917.934430] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 917.934590] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 917.934753] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 917.934912] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 917.935101] env[70020]: DEBUG nova.virt.hardware [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 917.935700] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.935964] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.936360] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.936516] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.936780] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.937626] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c30fee9-3e15-4766-b6a1-3c2abc02e6db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.941982] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90339433-eb99-4cc4-822b-269da89bfefe {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.950980] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39d4331-3551-4f19-836c-f7c4142326f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.956268] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.956523] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.957722] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed25733a-afdb-4ce7-9b8e-a67bda777c0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.976712] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 917.976712] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5252253e-20ac-c016-7200-ca95f7fc553d" [ 917.976712] env[70020]: _type = "Task" [ 917.976712] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.986026] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5252253e-20ac-c016-7200-ca95f7fc553d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.127111] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618477, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.261910] env[70020]: DEBUG nova.scheduler.client.report [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.267371] env[70020]: DEBUG nova.network.neutron [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updated VIF entry in instance network info cache for port 40bf9877-260b-49fc-85fd-307072a733f1. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.267749] env[70020]: DEBUG nova.network.neutron [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updating instance_info_cache with network_info: [{"id": "40bf9877-260b-49fc-85fd-307072a733f1", "address": "fa:16:3e:c1:4b:73", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bf9877-26", "ovs_interfaceid": "40bf9877-260b-49fc-85fd-307072a733f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.305528] env[70020]: DEBUG nova.compute.manager [req-e083860e-09e3-4d96-8390-1b4ceabdccd9 req-0aee3597-0ff5-432e-aad6-f3172f0108cb service nova] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Received event network-vif-deleted-063f2ba9-4e54-4d7e-9771-1defdefc3c00 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.312682] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.443173] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d0756709-f17b-441e-b537-df937cfbde84] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.492790] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5252253e-20ac-c016-7200-ca95f7fc553d, 'name': SearchDatastore_Task, 'duration_secs': 0.015121} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.493854] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae6bc62a-a7b3-44e1-b650-bd8ce21007c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.503103] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 918.503103] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dda988-f0fe-658d-b3a3-46d7e613049f" [ 918.503103] env[70020]: _type = "Task" [ 918.503103] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.517748] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dda988-f0fe-658d-b3a3-46d7e613049f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.628225] env[70020]: DEBUG oslo_vmware.api [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618477, 'name': PowerOnVM_Task, 'duration_secs': 0.779361} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.629081] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.629441] env[70020]: INFO nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Took 10.09 seconds to spawn the instance on the hypervisor. 
[ 918.629984] env[70020]: DEBUG nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.631896] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a553bcdc-8f2b-45c1-96af-8934788ea7e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.768107] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.911s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.768811] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.771727] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.359s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.773301] env[70020]: INFO nova.compute.claims [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.778018] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bef8096-a751-4aeb-b16b-4c45ff7bdcd3 req-cdd51724-ad13-460f-ad8b-09110e57ce37 service nova] Releasing lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.818049] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.947311] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 08ce6bc8-30fe-4c63-80e1-26c84ae75702] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.017548] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dda988-f0fe-658d-b3a3-46d7e613049f, 'name': SearchDatastore_Task, 'duration_secs': 0.091923} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.025335] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.025335] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a8982c31-ea86-4a8d-b8c6-006263ef41f8/a8982c31-ea86-4a8d-b8c6-006263ef41f8.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.025443] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eea50411-220d-485e-a119-2f78a3dcfec4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.041264] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 919.041264] env[70020]: value = "task-3618480" [ 919.041264] env[70020]: _type = "Task" [ 919.041264] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.056960] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.157240] env[70020]: INFO nova.compute.manager [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Took 58.44 seconds to build instance. [ 919.281906] env[70020]: DEBUG nova.compute.utils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 919.283938] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 919.284186] env[70020]: DEBUG nova.network.neutron [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.312570] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.354501] env[70020]: DEBUG nova.policy [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dec9771972184de8926b885067533a6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f14ea4f517a04de69f8bc56a19f2be8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.452669] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d601179a-df77-4f2e-b8df-9185b8a485e3] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.556695] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618480, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.659627] env[70020]: DEBUG oslo_concurrency.lockutils [None req-203e4e8e-5e02-4219-96b6-0d1e0dd3b67b tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "d65ab5e0-189c-43e1-accf-16248ad02852" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.171s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.699559] env[70020]: DEBUG nova.network.neutron [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Successfully updated port: 7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.736853] env[70020]: DEBUG nova.network.neutron [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Successfully created port: 3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.790892] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.812808] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.961272] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 79d98176-b566-4349-ad10-c2ea6fdbc657] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.053979] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.165802] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 920.202783] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.202947] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.203564] env[70020]: DEBUG nova.network.neutron [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.315957] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.440288] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421d627c-64a5-4aac-b36a-407e01cd659b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.448341] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a05121-2955-4414-920d-c7d924226d51 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.480253] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: a0b4a0b0-748d-46eb-9e39-3f21e394c090] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.483047] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1f6eee-09b6-40ab-b515-8218bf0e8bca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.491238] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad30fae-8359-4365-aa10-b974941053e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.506461] env[70020]: DEBUG nova.compute.provider_tree [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.555282] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': 
task-3618480, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.557649] env[70020]: DEBUG nova.compute.manager [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Received event network-vif-plugged-7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.557902] env[70020]: DEBUG oslo_concurrency.lockutils [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] Acquiring lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.558234] env[70020]: DEBUG oslo_concurrency.lockutils [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.558483] env[70020]: DEBUG oslo_concurrency.lockutils [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.558906] env[70020]: DEBUG nova.compute.manager [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] No waiting events found dispatching network-vif-plugged-7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 920.559352] env[70020]: WARNING nova.compute.manager [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Received unexpected event network-vif-plugged-7648a826-e268-4333-96ce-f336ff254b66 for instance with vm_state building and task_state spawning. [ 920.559590] env[70020]: DEBUG nova.compute.manager [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Received event network-changed-7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.562038] env[70020]: DEBUG nova.compute.manager [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Refreshing instance network info cache due to event network-changed-7648a826-e268-4333-96ce-f336ff254b66. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 920.562038] env[70020]: DEBUG oslo_concurrency.lockutils [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] Acquiring lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.695218] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.767328] env[70020]: DEBUG nova.network.neutron [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.804803] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.818822] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618479, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.835994} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.819386] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6/OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6.vmdk to [datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk. 
[ 920.819639] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Cleaning up location [datastore2] OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 920.819843] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_26d66003-bfbb-48dc-97ce-b4992f3180a6 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 920.820176] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47e7e808-5714-4c3e-8e32-e01c9b84174f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.833589] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.833907] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.834111] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.835060] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.835288] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 920.835374] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 
tempest-ListServerFiltersTestJSON-1686378133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.835584] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.835746] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.836477] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.836717] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.836908] env[70020]: DEBUG nova.virt.hardware [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.838048] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 920.838048] env[70020]: value = "task-3618481" [ 920.838048] env[70020]: _type = "Task" [ 920.838048] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.838897] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcde9248-2d64-46ec-ac3f-4c00af07da9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.851511] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618481, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.855157] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4e1174-73b1-44bd-8b1f-8680b65e57f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.983385] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 372e5569-8824-4841-b3d6-4b07423c7b3d] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.009188] env[70020]: DEBUG nova.scheduler.client.report [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.063026] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618480, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.120736] env[70020]: DEBUG nova.network.neutron [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Updating instance_info_cache with network_info: [{"id": "7648a826-e268-4333-96ce-f336ff254b66", "address": "fa:16:3e:99:29:9b", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7648a826-e2", "ovs_interfaceid": "7648a826-e268-4333-96ce-f336ff254b66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.148470] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 
tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "61875dcc-5b76-409b-987f-4ae875909257" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.148743] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "61875dcc-5b76-409b-987f-4ae875909257" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.149024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "61875dcc-5b76-409b-987f-4ae875909257-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.149146] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "61875dcc-5b76-409b-987f-4ae875909257-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.149319] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "61875dcc-5b76-409b-987f-4ae875909257-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.151914] env[70020]: INFO nova.compute.manager [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Terminating instance [ 921.237994] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "d65ab5e0-189c-43e1-accf-16248ad02852" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.238456] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "d65ab5e0-189c-43e1-accf-16248ad02852" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.239066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock 
"d65ab5e0-189c-43e1-accf-16248ad02852-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.239066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "d65ab5e0-189c-43e1-accf-16248ad02852-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.239257] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "d65ab5e0-189c-43e1-accf-16248ad02852-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.242901] env[70020]: INFO nova.compute.manager [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Terminating instance [ 921.356298] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170813} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.359458] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 921.359458] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.359458] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk to [datastore2] 8317f386-44d0-4b1b-8590-d0336fafac21/8317f386-44d0-4b1b-8590-d0336fafac21.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 921.359458] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72aaf2c3-f12e-4065-82bf-095226f79a34 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.364659] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 
921.364659] env[70020]: value = "task-3618482" [ 921.364659] env[70020]: _type = "Task" [ 921.364659] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.374464] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.488543] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: bb4e4986-af2a-4832-9ec7-777bca863dce] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.518875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.522821] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 921.527693] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.897s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.528797] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.530874] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.748s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.532382] env[70020]: INFO nova.compute.claims [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.558955] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618480, 'name': CopyVirtualDisk_Task} progress is 
100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.567652] env[70020]: INFO nova.scheduler.client.report [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted allocations for instance d3dbc3d1-bba7-4803-bacb-02de27a6a4ff [ 921.623980] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.624545] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Instance network_info: |[{"id": "7648a826-e268-4333-96ce-f336ff254b66", "address": "fa:16:3e:99:29:9b", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7648a826-e2", "ovs_interfaceid": "7648a826-e268-4333-96ce-f336ff254b66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 921.624905] env[70020]: DEBUG oslo_concurrency.lockutils [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] Acquired lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.625120] env[70020]: DEBUG nova.network.neutron [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Refreshing network info cache for port 7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.626619] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:29:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30c39e9a-a798-4f25-a48c-91f786ba332c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7648a826-e268-4333-96ce-f336ff254b66', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.635241] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Creating folder: Project (f14ea4f517a04de69f8bc56a19f2be8e). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.638522] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b4897c5-76c6-4533-96be-723bf2f27a32 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.651722] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Created folder: Project (f14ea4f517a04de69f8bc56a19f2be8e) in parent group-v721521. [ 921.651978] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Creating folder: Instances. Parent ref: group-v721726. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.652361] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ae8cf1d-62a1-44f7-852f-228914eae4ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.659487] env[70020]: DEBUG nova.compute.manager [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.659637] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.660838] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c29022-5ffa-4107-8365-7d8fe1bbab71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.665485] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Created folder: Instances in parent group-v721726. [ 921.665751] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.666529] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.666782] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-119cfe9f-f30b-46ca-83af-a59a66f0993a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.687964] env[70020]: DEBUG nova.network.neutron [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Successfully updated port: 3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.692060] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.692060] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5371216a-de7c-4cb0-a9ca-501d2a667c24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.694946] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.694946] env[70020]: value = "task-3618485" [ 921.694946] env[70020]: _type = "Task" [ 921.694946] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.700262] env[70020]: DEBUG oslo_vmware.api [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 921.700262] env[70020]: value = "task-3618486" [ 921.700262] env[70020]: _type = "Task" [ 921.700262] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.709323] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618485, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.715593] env[70020]: DEBUG oslo_vmware.api [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618486, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.757197] env[70020]: DEBUG nova.compute.manager [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.757197] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.757197] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c1f1a7-95d3-42ed-a6f7-8e21eef9e975 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.765574] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.765574] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79bc7846-0b8a-4094-b6d8-c1bd72545ca5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.777023] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 921.777023] env[70020]: value = "task-3618487" [ 921.777023] env[70020]: _type = "Task" [ 921.777023] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.786580] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618487, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.871520] env[70020]: DEBUG nova.compute.manager [req-af898374-1137-4aad-ba8f-e9230ba128af req-de14b275-f07a-42ee-9e13-cd29cdccf586 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Received event network-vif-plugged-3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.871764] env[70020]: DEBUG oslo_concurrency.lockutils [req-af898374-1137-4aad-ba8f-e9230ba128af req-de14b275-f07a-42ee-9e13-cd29cdccf586 service nova] Acquiring lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.871983] env[70020]: DEBUG oslo_concurrency.lockutils [req-af898374-1137-4aad-ba8f-e9230ba128af req-de14b275-f07a-42ee-9e13-cd29cdccf586 service nova] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.872265] env[70020]: DEBUG oslo_concurrency.lockutils [req-af898374-1137-4aad-ba8f-e9230ba128af req-de14b275-f07a-42ee-9e13-cd29cdccf586 service nova] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.872453] env[70020]: DEBUG nova.compute.manager [req-af898374-1137-4aad-ba8f-e9230ba128af req-de14b275-f07a-42ee-9e13-cd29cdccf586 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] No waiting events found dispatching network-vif-plugged-3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 921.872628] env[70020]: WARNING nova.compute.manager [req-af898374-1137-4aad-ba8f-e9230ba128af req-de14b275-f07a-42ee-9e13-cd29cdccf586 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Received unexpected event network-vif-plugged-3a364f68-e0c0-48ae-ab70-f576cd06610e for instance with vm_state building and task_state spawning. [ 921.882018] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.992365] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: b0b825d4-534d-4d54-a0c4-b9e507726c47] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.040024] env[70020]: DEBUG nova.compute.utils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 922.043466] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 922.043898] env[70020]: DEBUG nova.network.neutron [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.060159] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618480, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.976576} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.061341] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a8982c31-ea86-4a8d-b8c6-006263ef41f8/a8982c31-ea86-4a8d-b8c6-006263ef41f8.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 922.061748] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 922.062523] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f999a39-ade6-4c22-998b-b6d4ccb51b1b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.074654] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 922.074654] env[70020]: value = "task-3618488" [ 922.074654] env[70020]: _type = "Task" [ 922.074654] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.075420] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7100d9e6-638e-4f44-a958-aca8ef9b0e1a tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "d3dbc3d1-bba7-4803-bacb-02de27a6a4ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.401s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.087430] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.089162] env[70020]: DEBUG nova.network.neutron [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Updated VIF entry in instance network info cache for port 7648a826-e268-4333-96ce-f336ff254b66. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.089809] env[70020]: DEBUG nova.network.neutron [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Updating instance_info_cache with network_info: [{"id": "7648a826-e268-4333-96ce-f336ff254b66", "address": "fa:16:3e:99:29:9b", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7648a826-e2", "ovs_interfaceid": "7648a826-e268-4333-96ce-f336ff254b66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.112221] env[70020]: DEBUG nova.policy [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dec9771972184de8926b885067533a6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f14ea4f517a04de69f8bc56a19f2be8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) 
authorize /opt/stack/nova/nova/policy.py:192}} [ 922.192748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "refresh_cache-7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.192748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "refresh_cache-7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.192748] env[70020]: DEBUG nova.network.neutron [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.217023] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618485, 'name': CreateVM_Task, 'duration_secs': 0.493095} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.218110] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.219327] env[70020]: DEBUG oslo_vmware.api [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618486, 'name': PowerOffVM_Task, 'duration_secs': 0.258314} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.220297] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.220940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.222257] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 922.222626] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.223697] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.227022] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e834ff4c-5b36-4c76-833a-cae3f7e57276 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.227022] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6378d4b-2e92-44a5-b640-ac6e2d5953c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.235905] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 922.235905] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529771b2-17b7-9d96-ed69-c337807de7d4" [ 922.235905] env[70020]: _type = "Task" [ 922.235905] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.248642] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529771b2-17b7-9d96-ed69-c337807de7d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.290110] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618487, 'name': PowerOffVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.300397] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.300622] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.300792] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleting the datastore file [datastore1] 61875dcc-5b76-409b-987f-4ae875909257 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.301083] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11e9745c-283d-482e-89dd-a07b482b03d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.308583] env[70020]: DEBUG oslo_vmware.api [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 922.308583] env[70020]: value = "task-3618490" [ 922.308583] env[70020]: _type = "Task" [ 922.308583] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.318512] env[70020]: DEBUG oslo_vmware.api [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.378919] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.448687] env[70020]: DEBUG nova.network.neutron [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Successfully created port: ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.499702] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 1f95bfa8-bc97-4ed7-8c33-c00297430bf5] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.544608] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 922.592914] env[70020]: DEBUG oslo_concurrency.lockutils [req-00a084ae-fa34-4716-9b8e-50863a833ad5 req-a503ef45-54a9-4640-982a-6889a2a15471 service nova] Releasing lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.593479] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09005} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.596108] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.596108] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8212fcde-be14-472f-9333-d53801a708cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.633964] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] a8982c31-ea86-4a8d-b8c6-006263ef41f8/a8982c31-ea86-4a8d-b8c6-006263ef41f8.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.637200] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeb64e48-227e-41bc-9931-e1c6ebdaba12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.663554] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 922.663554] env[70020]: value = "task-3618491" [ 922.663554] env[70020]: _type = "Task" [ 922.663554] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.680379] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618491, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.753957] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529771b2-17b7-9d96-ed69-c337807de7d4, 'name': SearchDatastore_Task, 'duration_secs': 0.081625} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.754421] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.754704] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.754945] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.755119] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.755330] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.755614] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a10bd810-1e0a-4c56-bc9b-b357aa6b2130 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.784283] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.785353] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.790454] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bec0125-6563-44b6-8968-cc70525cb964 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.800887] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618487, 'name': PowerOffVM_Task, 'duration_secs': 0.545478} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.805020] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.805020] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.805020] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 922.805020] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5203960a-b360-4226-75fd-1bce069d7d1b" [ 922.805020] env[70020]: _type = "Task" [ 922.805020] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.805020] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdf19350-1ff0-444e-b5f2-20bab3dd1725 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.825036] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5203960a-b360-4226-75fd-1bce069d7d1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.826634] env[70020]: DEBUG oslo_vmware.api [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317102} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.830154] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.830344] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.830689] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.831031] env[70020]: INFO nova.compute.manager [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Took 1.17 seconds to destroy the instance on the hypervisor. [ 922.831226] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.831663] env[70020]: DEBUG nova.compute.manager [-] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.831765] env[70020]: DEBUG nova.network.neutron [-] [instance: 61875dcc-5b76-409b-987f-4ae875909257] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.878786] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.964871] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.964871] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.965222] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleting the datastore file [datastore2] d65ab5e0-189c-43e1-accf-16248ad02852 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.965935] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4389af3-5be4-46b8-9ee5-337f236011b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.979092] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for the task: (returnval){ [ 922.979092] env[70020]: value = "task-3618493" [ 922.979092] env[70020]: _type = "Task" [ 922.979092] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.993948] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.011776] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 6a114dce-7ed3-46e1-9d50-c3dd6efd340c] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.026121] env[70020]: DEBUG nova.network.neutron [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.180139] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618491, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.271785] env[70020]: DEBUG nova.network.neutron [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Updating instance_info_cache with network_info: [{"id": "3a364f68-e0c0-48ae-ab70-f576cd06610e", "address": "fa:16:3e:27:9a:59", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a364f68-e0", "ovs_interfaceid": "3a364f68-e0c0-48ae-ab70-f576cd06610e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.299673] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb7da87-d7bb-4569-8e23-a873f47fc27f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.315941] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31edcef2-db07-4a51-a05e-72f3d8f050f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.324884] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5203960a-b360-4226-75fd-1bce069d7d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.093053} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.326298] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1858c297-4a19-4a59-9deb-ab2792d2709f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.361439] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 923.361439] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525a3e35-314f-d759-37e9-ad96c83ec3d7" [ 923.361439] env[70020]: _type = "Task" [ 923.361439] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.363287] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37795d10-52a0-4819-abcb-d66b7f739c9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.385090] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12aaaa4e-c7bf-4960-8df6-22a5d761d986 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.390186] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525a3e35-314f-d759-37e9-ad96c83ec3d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.390471] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.404648] env[70020]: DEBUG nova.compute.provider_tree [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.491834] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.515973] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 516341a3-2230-4340-a1e0-ff97bb7a608d] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.564457] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 923.598165] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.598977] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.599412] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.599667] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.599887] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.600285] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.600685] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.601305] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
923.601583] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.601973] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.602304] env[70020]: DEBUG nova.virt.hardware [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.610027] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d975b0-c471-439c-baa9-9b1f82faf4fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.619245] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2165597-e15c-45e7-a2f8-89ca0e9d94d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.676221] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618491, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.680933] env[70020]: DEBUG nova.network.neutron [-] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.779064] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "refresh_cache-7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.779064] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Instance network_info: |[{"id": "3a364f68-e0c0-48ae-ab70-f576cd06610e", "address": "fa:16:3e:27:9a:59", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a364f68-e0", "ovs_interfaceid": "3a364f68-e0c0-48ae-ab70-f576cd06610e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 923.779064] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:9a:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30c39e9a-a798-4f25-a48c-91f786ba332c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a364f68-e0c0-48ae-ab70-f576cd06610e', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.788189] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.788189] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.788189] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2264905e-1b0d-44d6-8cd6-6141dccbf7c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.813865] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.813865] env[70020]: value = "task-3618494" [ 923.813865] env[70020]: _type = "Task" [ 923.813865] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.826535] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618494, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.875803] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525a3e35-314f-d759-37e9-ad96c83ec3d7, 'name': SearchDatastore_Task, 'duration_secs': 0.092812} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.879347] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.879639] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 45926a02-d0fe-4274-ba47-b97b3e12e4cd/45926a02-d0fe-4274-ba47-b97b3e12e4cd.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.879925] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc269996-fb6f-4294-af1d-b0039d55e393 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.895024] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.895024] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 923.895024] env[70020]: value = "task-3618495" [ 923.895024] env[70020]: _type = "Task" [ 923.895024] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.908373] env[70020]: DEBUG nova.scheduler.client.report [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.916448] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618495, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.993227] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.023322] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 81d5a1b4-1398-4fca-b500-aa2a3dc41494] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 924.184239] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618491, 'name': ReconfigVM_Task, 'duration_secs': 1.499371} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.184239] env[70020]: DEBUG nova.network.neutron [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Successfully updated port: ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.185056] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Reconfigured VM instance instance-00000048 to attach disk [datastore2] a8982c31-ea86-4a8d-b8c6-006263ef41f8/a8982c31-ea86-4a8d-b8c6-006263ef41f8.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.185954] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38c29ae1-0382-4aef-9617-ae6adc4be317 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.189579] env[70020]: INFO nova.compute.manager [-] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Took 1.36 seconds to deallocate network for instance. [ 924.201733] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 924.201733] env[70020]: value = "task-3618496" [ 924.201733] env[70020]: _type = "Task" [ 924.201733] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.215020] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618496, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.331405] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618494, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.392875] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.405697] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.424023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.889s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.424023] env[70020]: DEBUG nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 924.425545] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.830s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.426088] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.429025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.518s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.431104] env[70020]: INFO nova.compute.claims [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.438908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "58dded95-033a-46d7-b02e-5b2f2551234c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.438908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "58dded95-033a-46d7-b02e-5b2f2551234c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.446212] env[70020]: DEBUG 
nova.compute.manager [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Received event network-changed-3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.446212] env[70020]: DEBUG nova.compute.manager [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Refreshing instance network info cache due to event network-changed-3a364f68-e0c0-48ae-ab70-f576cd06610e. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 924.446588] env[70020]: DEBUG oslo_concurrency.lockutils [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] Acquiring lock "refresh_cache-7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.447286] env[70020]: DEBUG oslo_concurrency.lockutils [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] Acquired lock "refresh_cache-7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.447582] env[70020]: DEBUG nova.network.neutron [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Refreshing network info cache for port 3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.480038] env[70020]: INFO nova.scheduler.client.report [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Deleted allocations for instance 1d9218db-05d8-4e33-837f-e9865946237f [ 924.493044] env[70020]: DEBUG oslo_vmware.api [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Task: {'id': task-3618493, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.48326} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.495016] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.495016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.495016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.495016] env[70020]: INFO nova.compute.manager [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Took 2.74 seconds to destroy the instance on the hypervisor. [ 924.495016] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.495016] env[70020]: DEBUG nova.compute.manager [-] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 924.495016] env[70020]: DEBUG nova.network.neutron [-] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.528024] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 0cc49db6-1574-4e51-8692-b79ee14bc25d] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 924.687045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "refresh_cache-edef9245-4048-4ea4-90cc-ebed54498d88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.687045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "refresh_cache-edef9245-4048-4ea4-90cc-ebed54498d88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.687045] env[70020]: DEBUG nova.network.neutron [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.700209] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.720233] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618496, 'name': Rename_Task, 'duration_secs': 0.305021} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.720233] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.720233] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb081d2f-7e73-4011-9cd2-6a7498f931d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.743533] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 924.743533] env[70020]: value = "task-3618497" [ 924.743533] env[70020]: _type = "Task" [ 924.743533] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.755498] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618497, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.826778] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618494, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.888949] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618482, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.051811} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.889368] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/038d3b5b-38fb-498f-b4cc-5ed167e098c3/038d3b5b-38fb-498f-b4cc-5ed167e098c3.vmdk to [datastore2] 8317f386-44d0-4b1b-8590-d0336fafac21/8317f386-44d0-4b1b-8590-d0336fafac21.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.890635] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c3ec31-e165-4388-8efb-e7ab746a9568 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.920693] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 8317f386-44d0-4b1b-8590-d0336fafac21/8317f386-44d0-4b1b-8590-d0336fafac21.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.921833] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbb000a3-9a34-4fa8-ad22-35f935e75cae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.942769] env[70020]: DEBUG nova.compute.utils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 924.948017] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618495, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.021013} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.948017] env[70020]: DEBUG nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Not allocating networking since 'none' was specified. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 924.948775] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 45926a02-d0fe-4274-ba47-b97b3e12e4cd/45926a02-d0fe-4274-ba47-b97b3e12e4cd.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.949056] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.949220] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d34a407-9612-4c4b-9a3b-7531d7055f2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.959693] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 924.959693] env[70020]: value = "task-3618498" [ 924.959693] env[70020]: _type = "Task" [ 924.959693] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.961684] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 924.961684] env[70020]: value = "task-3618499" [ 924.961684] env[70020]: _type = "Task" [ 924.961684] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.978566] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.981589] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618498, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.987763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9a07698-27fb-4a3a-a88f-9308c931d907 tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "1d9218db-05d8-4e33-837f-e9865946237f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.849s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.030941] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 01773af2-4ce2-4d2a-b334-ab99348000a5] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 925.216829] env[70020]: DEBUG nova.network.neutron [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Updated VIF entry in instance network info cache for port 3a364f68-e0c0-48ae-ab70-f576cd06610e. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.216829] env[70020]: DEBUG nova.network.neutron [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Updating instance_info_cache with network_info: [{"id": "3a364f68-e0c0-48ae-ab70-f576cd06610e", "address": "fa:16:3e:27:9a:59", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a364f68-e0", "ovs_interfaceid": "3a364f68-e0c0-48ae-ab70-f576cd06610e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.257815] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618497, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.258902] env[70020]: DEBUG nova.network.neutron [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.262319] env[70020]: DEBUG nova.network.neutron [-] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.326618] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618494, 'name': CreateVM_Task, 'duration_secs': 1.049199} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.326878] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.327501] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.328549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.328549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.328549] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b3852c-7406-4bc4-b1f5-f753079f82ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.334795] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 925.334795] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525a20d0-0dd2-1d3f-2d92-ba5f341a71fe" [ 925.334795] env[70020]: _type = "Task" [ 925.334795] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.344947] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525a20d0-0dd2-1d3f-2d92-ba5f341a71fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.452012] env[70020]: DEBUG nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 925.473787] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618498, 'name': ReconfigVM_Task, 'duration_secs': 0.379129} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.477021] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 8317f386-44d0-4b1b-8590-d0336fafac21/8317f386-44d0-4b1b-8590-d0336fafac21.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.477021] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e11d335a-aef6-4c51-9a44-5901703bcc59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.483818] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087686} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.483818] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.483818] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2576164a-931f-47cb-bbaa-b3d66185e72b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.489056] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 925.489056] env[70020]: value = "task-3618500" [ 925.489056] env[70020]: _type = "Task" [ 925.489056] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.508188] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 45926a02-d0fe-4274-ba47-b97b3e12e4cd/45926a02-d0fe-4274-ba47-b97b3e12e4cd.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.516345] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15474e44-77df-45f4-8d59-2f788b5e7f46 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.540503] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.540503] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Cleaning up deleted instances with incomplete migration {{(pid=70020) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 925.547959] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618500, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.550933] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 925.550933] env[70020]: value = "task-3618501" [ 925.550933] env[70020]: _type = "Task" [ 925.550933] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.560525] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618501, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.591898] env[70020]: DEBUG nova.network.neutron [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Updating instance_info_cache with network_info: [{"id": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111", "address": "fa:16:3e:1a:7c:a3", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad1f3bb4-6f", "ovs_interfaceid": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.718652] env[70020]: DEBUG oslo_concurrency.lockutils [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] Releasing lock "refresh_cache-7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.718914] env[70020]: DEBUG nova.compute.manager [req-bcde3967-698c-4c38-a414-50afb081ab09 req-94b4ad38-7dc7-45a5-a036-570abe3aa445 service nova] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Received event network-vif-deleted-6d51b5bb-0659-4302-96ab-4991ce36e722 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.761155] env[70020]: DEBUG oslo_vmware.api [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618497, 'name': PowerOnVM_Task, 'duration_secs': 0.56727} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.761539] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.761890] env[70020]: INFO nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Took 11.67 seconds to spawn the instance on the hypervisor. 
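The entries above follow oslo.vmware's task-polling loop: each vCenter call (CreateVM_Task, ReconfigVM_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...) returns a task reference, and wait_for_task re-reads the task state on a fixed interval, emitting the "Task: {...} progress is N%" lines until the task reports success or error. A minimal self-contained sketch of that loop, assuming a caller-supplied get_task_info() that returns an object with key/state/progress/result/error fields (a hypothetical stand-in for reading the task's info property from vCenter):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state."""
        while True:
            info = get_task_info()  # hypothetical TaskInfo-like object (key, state, progress, result, error)
            if info.state == 'success':
                print("Task %s completed successfully." % info.key)
                return info.result
            if info.state == 'error':
                raise RuntimeError("Task %s failed: %s" % (info.key, info.error))
            # 'queued' or 'running': report progress, then poll again,
            # mirroring the "progress is N%" lines above.
            print("Task %s progress is %s%%." % (info.key, info.progress or 0))
            time.sleep(poll_interval)

oslo.vmware layers session re-establishment and retry handling on top of this; the sketch keeps only the polling shape that is visible in these log lines.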
[ 925.762184] env[70020]: DEBUG nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 925.763398] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c69053-1c23-4b3c-bcc7-03ef929059a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.770189] env[70020]: INFO nova.compute.manager [-] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Took 1.28 seconds to deallocate network for instance. [ 925.846569] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525a20d0-0dd2-1d3f-2d92-ba5f341a71fe, 'name': SearchDatastore_Task, 'duration_secs': 0.0113} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.849874] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.850258] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.851969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.851969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.851969] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.851969] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85e2f36c-7520-41b0-ad92-265f7ef16195 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.862109] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.862326] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.863169] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8d6c6ae-a4b4-4b0b-a87a-55bace655a2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.870717] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 925.870717] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a74d88-93ed-4275-a457-0ad4acc71dcb" [ 925.870717] env[70020]: _type = "Task" [ 925.870717] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.885538] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a74d88-93ed-4275-a457-0ad4acc71dcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.010777] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618500, 'name': Rename_Task, 'duration_secs': 0.196516} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.013526] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.014259] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a43892f-06d8-4447-bf8d-7c0793487c62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.022675] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 926.022675] env[70020]: value = "task-3618502" [ 926.022675] env[70020]: _type = "Task" [ 926.022675] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.035682] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.046108] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.063241] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618501, 'name': ReconfigVM_Task, 'duration_secs': 0.353496} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.063514] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 45926a02-d0fe-4274-ba47-b97b3e12e4cd/45926a02-d0fe-4274-ba47-b97b3e12e4cd.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.064447] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7342c3d6-fb61-4e64-9890-0917e9f33194 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.073466] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 926.073466] env[70020]: value = "task-3618503" [ 926.073466] env[70020]: _type = "Task" [ 926.073466] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.085504] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618503, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.098289] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "refresh_cache-edef9245-4048-4ea4-90cc-ebed54498d88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.098603] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Instance network_info: |[{"id": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111", "address": "fa:16:3e:1a:7c:a3", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad1f3bb4-6f", "ovs_interfaceid": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 926.099013] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:7c:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30c39e9a-a798-4f25-a48c-91f786ba332c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad1f3bb4-6fad-4f75-ad98-b53b3676f111', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.107816] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.108099] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.108314] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a89bb4a-cd49-4771-8732-49190c06184a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.136595] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.136595] env[70020]: value = "task-3618504" [ 926.136595] env[70020]: _type = "Task" [ 926.136595] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.141353] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e927026-9ebf-4f47-8cf7-e62023f3b885 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.151332] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618504, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.154348] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4460c0be-2495-45e9-a04a-b3633a3a7ac3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.188277] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb4b0ce-4c37-43c5-8371-245a969404ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.198255] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3760de7c-d8a6-40e7-8df3-872562dc7473 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.215116] env[70020]: DEBUG nova.compute.provider_tree [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.282320] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.292235] env[70020]: INFO nova.compute.manager [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Took 55.85 seconds to build instance. 
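The instance_info_cache payloads logged above ("Updating instance_info_cache with network_info: [...]" and "Instance network_info: |[...]|") are lists of VIF dictionaries. The sketch below, with data trimmed from the cache entry for port ad1f3bb4-6fad-4f75-ad98-b53b3676f111 shown earlier, illustrates how the fields reported in these lines (port id, MAC, devname, fixed IP) nest inside that structure:

    # One VIF entry, trimmed from the instance_info_cache update above.
    network_info = [{
        "id": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111",
        "address": "fa:16:3e:1a:7c:a3",
        "devname": "tapad1f3bb4-6f",
        "network": {
            "id": "28d7e64a-7917-46ed-91bf-c239aad81e05",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.12", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["devname"], fixed_ips)
        # ad1f3bb4-6fad-4f75-ad98-b53b3676f111 fa:16:3e:1a:7c:a3 tapad1f3bb4-6f ['192.168.128.12']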
[ 926.386026] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a74d88-93ed-4275-a457-0ad4acc71dcb, 'name': SearchDatastore_Task, 'duration_secs': 0.013659} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.386995] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80c525c9-da18-4dff-8173-8bbfb2594b9c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.393991] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 926.393991] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cbcb79-8d18-79c3-d0cc-492baf87aaa2" [ 926.393991] env[70020]: _type = "Task" [ 926.393991] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.403274] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cbcb79-8d18-79c3-d0cc-492baf87aaa2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.465166] env[70020]: DEBUG nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 926.492891] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 926.493281] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.493481] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 926.493753] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.493927] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 926.494127] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 926.494400] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 926.494621] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 926.494804] env[70020]: DEBUG nova.virt.hardware [None 
req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 926.494967] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 926.495182] env[70020]: DEBUG nova.virt.hardware [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 926.496200] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b773dcec-a7b5-4007-bce9-f15f2f64b5d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.507030] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de488aa-f7a9-4569-8add-e481353bca93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.527979] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.534399] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Creating folder: Project (d5f2e2fa253c4364aeeb52de20cc52d5). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.539302] env[70020]: DEBUG nova.compute.manager [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Received event network-vif-plugged-ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.539534] env[70020]: DEBUG oslo_concurrency.lockutils [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] Acquiring lock "edef9245-4048-4ea4-90cc-ebed54498d88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.539766] env[70020]: DEBUG oslo_concurrency.lockutils [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] Lock "edef9245-4048-4ea4-90cc-ebed54498d88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.539956] env[70020]: DEBUG oslo_concurrency.lockutils [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] Lock "edef9245-4048-4ea4-90cc-ebed54498d88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.540187] env[70020]: DEBUG nova.compute.manager [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] No waiting events found dispatching network-vif-plugged-ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.540322] env[70020]: WARNING nova.compute.manager [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Received unexpected event network-vif-plugged-ad1f3bb4-6fad-4f75-ad98-b53b3676f111 for instance with vm_state building and task_state spawning. [ 926.540483] env[70020]: DEBUG nova.compute.manager [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Received event network-changed-ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.540645] env[70020]: DEBUG nova.compute.manager [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Refreshing instance network info cache due to event network-changed-ad1f3bb4-6fad-4f75-ad98-b53b3676f111. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 926.540917] env[70020]: DEBUG oslo_concurrency.lockutils [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] Acquiring lock "refresh_cache-edef9245-4048-4ea4-90cc-ebed54498d88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.540956] env[70020]: DEBUG oslo_concurrency.lockutils [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] Acquired lock "refresh_cache-edef9245-4048-4ea4-90cc-ebed54498d88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.541166] env[70020]: DEBUG nova.network.neutron [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Refreshing network info cache for port ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.543334] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a011ddc1-b65f-42c1-baa3-03acf46a2faf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.556861] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618502, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.565438] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Created folder: Project (d5f2e2fa253c4364aeeb52de20cc52d5) in parent group-v721521. [ 926.565876] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Creating folder: Instances. Parent ref: group-v721731. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.566278] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3e2e774-f9c2-4c7d-bb65-fbe3e9833ba1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.588402] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618503, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.589882] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Created folder: Instances in parent group-v721731. [ 926.590197] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.590386] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.590602] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a2f6b47-4d7b-44b5-89c8-e65a0b6b549e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.615457] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.615457] env[70020]: value = "task-3618507" [ 926.615457] env[70020]: _type = "Task" [ 926.615457] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.626485] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618507, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.647618] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618504, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.719395] env[70020]: DEBUG nova.scheduler.client.report [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.795062] env[70020]: DEBUG oslo_concurrency.lockutils [None req-02f7d238-57c7-4c6c-b16c-f336346d61a3 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.628s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.904476] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cbcb79-8d18-79c3-d0cc-492baf87aaa2, 'name': SearchDatastore_Task, 'duration_secs': 0.01516} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.904748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.905014] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6/7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.905356] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45e38969-2e3e-467a-bce0-769706fc3d9b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.912405] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 926.912405] env[70020]: value = "task-3618508" [ 926.912405] env[70020]: _type = "Task" [ 926.912405] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.920036] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.036272] env[70020]: DEBUG oslo_vmware.api [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618502, 'name': PowerOnVM_Task, 'duration_secs': 0.606638} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.036685] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.036928] env[70020]: INFO nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Took 15.71 seconds to spawn the instance on the hypervisor. 
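The devstack-image-cache_base entries above show image preparation being serialized per cached VMDK: a lock named after the cached disk path is acquired, the cache directory and disk are checked (SearchDatastore_Task), the lock is released, and only then is the cached disk copied into the instance's own folder (CopyVirtualDisk_Task). A simplified sketch of that pattern using oslo.concurrency's lock() context manager; cache_is_populated, fetch_into_cache, and copy_virtual_disk are hypothetical stand-ins for the actual vCenter calls:

    from oslo_concurrency import lockutils

    IMAGE_ID = "c9cd83bf-fd12-4173-a067-f57d38f23556"
    CACHE_VMDK = ("[datastore2] devstack-image-cache_base/%s/%s.vmdk"
                  % (IMAGE_ID, IMAGE_ID))

    def prepare_root_disk(instance_uuid, cache_is_populated, fetch_into_cache,
                          copy_virtual_disk):
        """Serialize on the cached VMDK, then copy it into the instance folder."""
        # lockutils.lock() is a context manager; it produces the
        # "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG lines
        # seen above (lockutils.py:313/316/334).
        with lockutils.lock(CACHE_VMDK):
            if not cache_is_populated(CACHE_VMDK):   # the SearchDatastore_Task step
                fetch_into_cache(CACHE_VMDK)         # hypothetical: populate the cache if the disk is missing
        # With the cached disk known to exist, copy it to the instance's own
        # directory (the CopyVirtualDisk_Task step in the log).
        instance_vmdk = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)
        copy_virtual_disk(CACHE_VMDK, instance_vmdk)
        return instance_vmdk

Keying the lock on the VMDK path rather than a single global lock lets concurrent spawns that use different images proceed in parallel, which is consistent with the per-image lock names seen in these entries.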
[ 927.037207] env[70020]: DEBUG nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 927.038197] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fbdddf-aa74-4ae8-b337-93b388272f3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.084930] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618503, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.132706] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618507, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.150106] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618504, 'name': CreateVM_Task, 'duration_secs': 0.546377} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.150313] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.151065] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.151247] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.151594] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.151864] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-470dd507-20c0-4272-83f6-177cfe8a8ab0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.156830] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 927.156830] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bc05d4-2b8f-a621-51f5-a7e2dba52732" [ 927.156830] env[70020]: _type 
= "Task" [ 927.156830] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.167501] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bc05d4-2b8f-a621-51f5-a7e2dba52732, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.228346] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.797s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.228346] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 927.233162] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.185s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.300372] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.427886] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618508, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.430335] env[70020]: DEBUG nova.network.neutron [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Updated VIF entry in instance network info cache for port ad1f3bb4-6fad-4f75-ad98-b53b3676f111. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.431516] env[70020]: DEBUG nova.network.neutron [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Updating instance_info_cache with network_info: [{"id": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111", "address": "fa:16:3e:1a:7c:a3", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad1f3bb4-6f", "ovs_interfaceid": "ad1f3bb4-6fad-4f75-ad98-b53b3676f111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.560972] env[70020]: INFO nova.compute.manager [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Took 64.56 seconds to build instance. [ 927.589250] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618503, 'name': Rename_Task, 'duration_secs': 1.182542} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.592885] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.593926] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e3b401b-471e-45d0-aa00-6b723da536d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.601036] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 927.601036] env[70020]: value = "task-3618509" [ 927.601036] env[70020]: _type = "Task" [ 927.601036] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.614187] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.628968] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618507, 'name': CreateVM_Task, 'duration_secs': 0.911316} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.629172] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.629585] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.670537] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bc05d4-2b8f-a621-51f5-a7e2dba52732, 'name': SearchDatastore_Task, 'duration_secs': 0.020578} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.670873] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.671147] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 927.671461] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.671644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.671866] 
env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.674764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.675105] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.675355] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c8444a9-1368-4105-88e0-093f524b7232 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.677305] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f390a56-f888-406a-8b0b-c94232f7e567 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.682748] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 927.682748] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ac4065-0bae-a488-e35b-45ec5819af80" [ 927.682748] env[70020]: _type = "Task" [ 927.682748] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.686864] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.687055] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 927.688057] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e801d2c1-05be-4eb6-9346-f96dadc04de6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.695919] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ac4065-0bae-a488-e35b-45ec5819af80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.701022] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 927.701022] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a99b96-7ce0-1624-18be-0430cbb00a91" [ 927.701022] env[70020]: _type = "Task" [ 927.701022] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.709316] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a99b96-7ce0-1624-18be-0430cbb00a91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.739276] env[70020]: DEBUG nova.compute.utils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 927.744583] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 927.744855] env[70020]: DEBUG nova.network.neutron [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.805804] env[70020]: DEBUG nova.policy [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b543e081f574f1f85874775a734a0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e3eae740ef84ef88aef113ed4d6e57b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 927.828634] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.923132] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898c8622-a8e9-42f2-83e3-6f4f0effb0e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
927.936501] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e175fcdc-6927-445c-aad9-2fc5a245162a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.940928] env[70020]: DEBUG oslo_concurrency.lockutils [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] Releasing lock "refresh_cache-edef9245-4048-4ea4-90cc-ebed54498d88" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.944023] env[70020]: DEBUG nova.compute.manager [req-a42cc766-e0ca-4f25-8aa6-c9c0a63f1d59 req-e0bd1b10-c118-4a4b-ad1f-f2b3de421dce service nova] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Received event network-vif-deleted-4896df9a-0702-4071-8432-b95ec01f1d13 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.944023] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.746009} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.944023] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6/7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.944023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.944023] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6644c8f3-01f4-4fbf-9e8e-abfa2b7cdf03 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.976786] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50c3210-2fd0-4ee7-bf9f-bf33f59117c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.981793] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 927.981793] env[70020]: value = "task-3618510" [ 927.981793] env[70020]: _type = "Task" [ 927.981793] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.991364] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7ac81a-f2a7-43a6-985c-4622ab4ec60f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.000589] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618510, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.011280] env[70020]: DEBUG nova.compute.provider_tree [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.063893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73670647-5313-4081-bfa7-4e08399dad5a tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "8317f386-44d0-4b1b-8590-d0336fafac21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.827s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.111584] env[70020]: DEBUG oslo_vmware.api [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618509, 'name': PowerOnVM_Task, 'duration_secs': 0.509665} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.111944] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.112186] env[70020]: INFO nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Took 10.22 seconds to spawn the instance on the hypervisor. 
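The repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries in this section are produced by oslo.vmware's task-polling helpers (wait_for_task/_poll_task in oslo_vmware/api.py). A minimal sketch of how a driver typically drives that loop through VMwareAPISession; the connection parameters and the managed-object reference below are placeholders, not values taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder credentials; a real driver reads these from configuration.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed-object reference for the VM being powered on.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call and returns a vSphere task reference;
    # wait_for_task() then re-reads TaskInfo every task_poll_interval seconds
    # (the "progress is N%" lines above) and returns once the task reaches the
    # success state, raising if it ends in an error or cancelled state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)

The same pattern covers the CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and SearchDatastore_Task entries in this section; only the invoked method and its arguments change.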
[ 928.112397] env[70020]: DEBUG nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.113338] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e1e3f0-8993-4cbf-8426-05ac71089bb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.195323] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ac4065-0bae-a488-e35b-45ec5819af80, 'name': SearchDatastore_Task, 'duration_secs': 0.023618} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.195638] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.195890] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.196132] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.205798] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a4b1f2-1352-14ae-70ff-2cd3a1b6e13e/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 928.206649] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c30e6d0-2563-43a7-abee-212660465166 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.212753] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a99b96-7ce0-1624-18be-0430cbb00a91, 'name': SearchDatastore_Task, 'duration_secs': 0.013546} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.213986] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f453a78-0a96-4f08-b320-1443300254cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.217916] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a4b1f2-1352-14ae-70ff-2cd3a1b6e13e/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 928.217916] env[70020]: ERROR oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a4b1f2-1352-14ae-70ff-2cd3a1b6e13e/disk-0.vmdk due to incomplete transfer. [ 928.218477] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-85ce22ce-a56d-4375-8658-beb78d0782f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.221414] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 928.221414] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521c2790-c587-0920-fbd2-15df6229f096" [ 928.221414] env[70020]: _type = "Task" [ 928.221414] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.226080] env[70020]: DEBUG oslo_vmware.rw_handles [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a4b1f2-1352-14ae-70ff-2cd3a1b6e13e/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 928.226319] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Uploaded image c5e24af1-d7e3-4c3c-af61-b7b1976b5f88 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 928.228053] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 928.231014] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a65ed2c3-03df-4ef5-95cf-35f5f324a5dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.232439] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521c2790-c587-0920-fbd2-15df6229f096, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.236805] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 928.236805] env[70020]: value = "task-3618511" [ 928.236805] env[70020]: _type = "Task" [ 928.236805] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.244687] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618511, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.249313] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 928.285677] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.285933] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.286123] env[70020]: INFO nova.compute.manager [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Shelving [ 928.404420] env[70020]: DEBUG nova.network.neutron [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Successfully created port: d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.493310] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081866} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.493663] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.494525] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b8a8a3-27ff-4415-936c-045af7a43009 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.517088] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6/7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.518124] env[70020]: DEBUG nova.scheduler.client.report [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.521510] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e786d12e-59b6-447f-9f4d-2223689f40e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.548432] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 928.548432] env[70020]: value = "task-3618512" [ 928.548432] env[70020]: _type = "Task" [ 928.548432] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.557690] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618512, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.566814] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 928.637963] env[70020]: INFO nova.compute.manager [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Took 52.55 seconds to build instance. [ 928.734512] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521c2790-c587-0920-fbd2-15df6229f096, 'name': SearchDatastore_Task, 'duration_secs': 0.032492} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.735086] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.735590] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] edef9245-4048-4ea4-90cc-ebed54498d88/edef9245-4048-4ea4-90cc-ebed54498d88.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 928.736121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.736492] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.736929] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49a23284-4f12-469b-b51b-aa788fbb4444 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.739782] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6dc64e9-3c3d-408b-839a-b23efc09584d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.753320] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 928.753320] env[70020]: value = "task-3618513" [ 928.753320] env[70020]: _type = "Task" [ 928.753320] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.766251] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618511, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.770390] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.770663] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.771547] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a1de110-5b81-4a0e-918a-f77338f56f03 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.779210] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618513, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.780991] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 928.780991] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ad664b-0bea-b234-6ddc-94e6a5b231b6" [ 928.780991] env[70020]: _type = "Task" [ 928.780991] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.790820] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ad664b-0bea-b234-6ddc-94e6a5b231b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.043694] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.043694] env[70020]: INFO nova.compute.manager [None req-5a318965-fcd3-482d-ac34-c3c6df92c2f2 tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Successfully reverted task state from rebuilding on failure for instance. 
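The "Acquiring lock", "acquired ... waited N s" and "released ... held N s" entries in this section come from oslo.concurrency's lockutils, which Nova uses both for in-process named locks (e.g. "compute_resources") and for per-image cache paths on the datastore. A minimal sketch of the two usage patterns implied by those lines; the lock names and function bodies are illustrative assumptions, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: serialises every call under the same in-process lock
    # name, mirroring the "compute_resources" acquire/release pairs logged
    # in this section.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # critical section; only one caller holds the lock at a time

    # Context-manager form, as used around the cached image VMDK path so that
    # concurrent builds do not fetch or move the same cache entry twice.
    with lockutils.lock('[datastore2] devstack-image-cache_base/<image-id>.vmdk'):
        pass  # fetch the image into the cache, or reuse the existing copy

The "waited"/"held" durations in the log are reported by the same wrapper, which is why long waits on "compute_resources" show up explicitly when many concurrent builds contend for one resource tracker.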
[ 929.055593] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.943s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.055593] env[70020]: DEBUG nova.objects.instance [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lazy-loading 'resources' on Instance uuid 6f2bc97b-0f0a-4f16-b41c-7af96130783f {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.056034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "40fa0339-c221-4841-9444-dc957a95cf3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.056326] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.066881] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618512, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.095238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.140378] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e3dad012-c5e9-4ef0-9640-3ca021447407 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.503s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.258580] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618511, 'name': Destroy_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.268301] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 929.280492] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618513, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.293794] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ad664b-0bea-b234-6ddc-94e6a5b231b6, 'name': SearchDatastore_Task, 'duration_secs': 0.018995} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.297244] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 929.297425] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.297572] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 929.297752] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.297927] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 929.298072] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 929.298359] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 929.298456] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 929.298623] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 929.298780] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 929.298944] env[70020]: DEBUG nova.virt.hardware [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 929.300118] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.300793] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc3c9da-7d33-4f28-a270-6ba2af878781 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.303864] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc3589f-0305-4ebe-93cf-50b1f971e5b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.308022] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52b3d0f0-dbfe-4ad4-83a5-e33b06b87b06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.318337] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75decdcd-1366-42f5-9999-3ceec517a954 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.325236] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 929.325236] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a7d1d-dea4-6fe9-bd10-e1f1d007c052" [ 929.325236] env[70020]: _type = "Task" [ 929.325236] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.325550] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 929.325550] env[70020]: value = "task-3618514" [ 929.325550] env[70020]: _type = "Task" [ 929.325550] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.348198] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526a7d1d-dea4-6fe9-bd10-e1f1d007c052, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.351425] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618514, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.557020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "8317f386-44d0-4b1b-8590-d0336fafac21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.557307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "8317f386-44d0-4b1b-8590-d0336fafac21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.557522] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "8317f386-44d0-4b1b-8590-d0336fafac21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.557709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "8317f386-44d0-4b1b-8590-d0336fafac21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.557874] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "8317f386-44d0-4b1b-8590-d0336fafac21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.564910] env[70020]: INFO nova.compute.manager [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Terminating instance [ 929.577948] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618512, 'name': ReconfigVM_Task, 'duration_secs': 0.988761} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.577948] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6/7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.578557] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1b83dae-43db-46d5-8ad8-e6901f0e2ae3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.585655] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 929.585655] env[70020]: value = "task-3618515" [ 929.585655] env[70020]: _type = "Task" [ 929.585655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.602899] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618515, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.643062] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.757539] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618511, 'name': Destroy_Task, 'duration_secs': 1.0797} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.757817] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Destroyed the VM [ 929.757878] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 929.760662] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5d3a6c67-0fb2-4587-a48b-6717389cb9fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.771817] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 929.771817] env[70020]: value = "task-3618516" [ 929.771817] env[70020]: _type = "Task" [ 929.771817] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.775864] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618513, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817303} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.781982] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] edef9245-4048-4ea4-90cc-ebed54498d88/edef9245-4048-4ea4-90cc-ebed54498d88.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 929.782225] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 929.783606] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12fd142a-7cf5-4256-866f-cd3162e466c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.791569] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618516, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.792868] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 929.792868] env[70020]: value = "task-3618517" [ 929.792868] env[70020]: _type = "Task" [ 929.792868] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.804047] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.840423] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526a7d1d-dea4-6fe9-bd10-e1f1d007c052, 'name': SearchDatastore_Task, 'duration_secs': 0.065413} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.843283] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.843565] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.844242] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618514, 'name': PowerOffVM_Task, 'duration_secs': 0.324869} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.844443] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4903587-e7fb-4379-8e44-46f5e21d9ac5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.846759] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.849940] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec19ccf-6aac-421b-a258-b5df588979f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.856984] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 929.856984] env[70020]: value = "task-3618518" [ 929.856984] env[70020]: _type = "Task" [ 929.856984] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.878780] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eac8b0-df93-41fb-90e9-bf485e17533d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.890577] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.072993] env[70020]: DEBUG nova.compute.manager [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.073338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.074865] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54038fea-2b15-4f5d-ad2d-db1f1adc3de1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.082626] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.082626] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d351aa3e-a767-4103-bb67-eb59f2c44742 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.097459] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618515, 'name': Rename_Task, 'duration_secs': 0.173111} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.098913] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.099257] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 930.099257] env[70020]: value = "task-3618519" [ 930.099257] env[70020]: _type = "Task" [ 930.099257] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.102262] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0378e134-2b64-43bd-bf39-0e1e7eed216b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.115093] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618519, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.116544] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 930.116544] env[70020]: value = "task-3618520" [ 930.116544] env[70020]: _type = "Task" [ 930.116544] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.120925] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ecb1f6-bf72-424b-a486-cdaf053caf69 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.134150] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618520, 'name': PowerOnVM_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.137145] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c89e48-dec0-48e8-9c37-d8932428a6e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.177572] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4f4f3f-7f81-441e-aed6-37db6a7bf0dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.187482] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81ea81b-7671-4f35-a56a-57cef5e49bf7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.192969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.206763] env[70020]: DEBUG nova.compute.provider_tree [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.271253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.271571] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.271796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.271979] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.272158] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.274668] env[70020]: INFO nova.compute.manager [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Terminating instance [ 930.287593] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618516, 'name': RemoveSnapshot_Task} progress is 46%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.305544] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057605} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.305762] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.306588] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff487c5-5bfa-47db-8d3e-a858b7ba8dc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.330905] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] edef9245-4048-4ea4-90cc-ebed54498d88/edef9245-4048-4ea4-90cc-ebed54498d88.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.331837] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12679505-6980-4bc7-b9ec-0df457591909 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.352872] env[70020]: DEBUG nova.network.neutron [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Successfully updated port: d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 930.363117] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 930.363117] env[70020]: value = "task-3618521" [ 930.363117] env[70020]: _type = "Task" [ 930.363117] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.373229] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618521, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.382458] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618518, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.392329] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 930.392631] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-259125c5-bbfe-447e-9308-072303643639 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.400050] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 930.400050] env[70020]: value = "task-3618522" [ 930.400050] env[70020]: _type = "Task" [ 930.400050] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.407674] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618522, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.436802] env[70020]: DEBUG nova.compute.manager [req-e6b955d0-cdeb-4837-8352-5ce3659ad0be req-e94e886d-60ba-4a7a-9778-de0aaa24a166 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Received event network-vif-plugged-d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.436961] env[70020]: DEBUG oslo_concurrency.lockutils [req-e6b955d0-cdeb-4837-8352-5ce3659ad0be req-e94e886d-60ba-4a7a-9778-de0aaa24a166 service nova] Acquiring lock "3a4f2342-58e7-436b-a779-0fa093b52409-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.437214] env[70020]: DEBUG oslo_concurrency.lockutils [req-e6b955d0-cdeb-4837-8352-5ce3659ad0be req-e94e886d-60ba-4a7a-9778-de0aaa24a166 service nova] Lock "3a4f2342-58e7-436b-a779-0fa093b52409-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.437876] env[70020]: DEBUG oslo_concurrency.lockutils [req-e6b955d0-cdeb-4837-8352-5ce3659ad0be req-e94e886d-60ba-4a7a-9778-de0aaa24a166 service nova] Lock "3a4f2342-58e7-436b-a779-0fa093b52409-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.437876] env[70020]: DEBUG nova.compute.manager [req-e6b955d0-cdeb-4837-8352-5ce3659ad0be req-e94e886d-60ba-4a7a-9778-de0aaa24a166 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] No waiting events found dispatching network-vif-plugged-d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 930.437876] env[70020]: WARNING 
nova.compute.manager [req-e6b955d0-cdeb-4837-8352-5ce3659ad0be req-e94e886d-60ba-4a7a-9778-de0aaa24a166 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Received unexpected event network-vif-plugged-d1869b01-6eea-468c-ac71-153c8eeda8ca for instance with vm_state building and task_state spawning. [ 930.613842] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618519, 'name': PowerOffVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.630210] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618520, 'name': PowerOnVM_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.712525] env[70020]: DEBUG nova.scheduler.client.report [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.784286] env[70020]: DEBUG nova.compute.manager [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.784286] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.785736] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1416a6bb-288b-40f7-a475-3f6aea4eee3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.793737] env[70020]: DEBUG oslo_vmware.api [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618516, 'name': RemoveSnapshot_Task, 'duration_secs': 0.84008} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.794931] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 930.795382] env[70020]: INFO nova.compute.manager [None req-2ab05779-3536-4e9c-becb-5c5408060c05 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Took 17.57 seconds to snapshot the instance on the hypervisor. [ 930.800446] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.800960] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fafa7bb-e6b2-456d-bbf2-8bb13d3d6430 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.810025] env[70020]: DEBUG oslo_vmware.api [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 930.810025] env[70020]: value = "task-3618523" [ 930.810025] env[70020]: _type = "Task" [ 930.810025] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.821437] env[70020]: DEBUG oslo_vmware.api [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618523, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.856837] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.857330] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.857533] env[70020]: DEBUG nova.network.neutron [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.875332] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618521, 'name': ReconfigVM_Task, 'duration_secs': 0.406085} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.879037] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Reconfigured VM instance instance-0000004b to attach disk [datastore2] edef9245-4048-4ea4-90cc-ebed54498d88/edef9245-4048-4ea4-90cc-ebed54498d88.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.880077] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6008b2fa-e86e-4b60-9e83-5a2acc45a6a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.889295] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594884} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.890841] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.891117] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.891585] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 930.891585] env[70020]: value = "task-3618524" [ 930.891585] env[70020]: _type = "Task" [ 930.891585] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.891808] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fd8862c-5c8b-4579-be71-b4447d539ee1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.907277] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618524, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.909345] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 930.909345] env[70020]: value = "task-3618525" [ 930.909345] env[70020]: _type = "Task" [ 930.909345] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.917403] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618522, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.924148] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618525, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.116081] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618519, 'name': PowerOffVM_Task, 'duration_secs': 0.548423} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.116389] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.116579] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.116905] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ecc40a5-ca59-4682-9bd4-910d69610197 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.131688] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.203817] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.204106] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.204304] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleting the datastore file [datastore2] 8317f386-44d0-4b1b-8590-d0336fafac21 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.204585] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf92c1b1-1163-4e12-b17a-5b3706a47310 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.213329] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 931.213329] env[70020]: value = "task-3618527" [ 931.213329] env[70020]: _type = "Task" [ 931.213329] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.218049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.220500] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 26.575s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.228651] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.330235] env[70020]: DEBUG oslo_vmware.api [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618523, 'name': PowerOffVM_Task, 'duration_secs': 0.244841} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.330235] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.330235] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.330235] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f8c4aba-dc13-4dec-9d12-41e5070b1603 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.410453] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618524, 'name': Rename_Task, 'duration_secs': 0.235133} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.410732] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.414022] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.414022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Deleting the datastore file [datastore1] c9ce57f3-f9a2-40aa-b7eb-403840c34304 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.414022] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 931.414022] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b929eef-d464-449e-b09b-beef16df5dae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.422885] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b40c8b42-4c49-4696-9c34-75df87622500 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.424859] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618522, 'name': CreateSnapshot_Task, 'duration_secs': 0.630325} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.425686] env[70020]: DEBUG nova.network.neutron [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.427836] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 931.429194] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a176f1-61ed-425a-aeb4-ed169a66166e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.435247] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082553} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.439224] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.439610] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 931.439610] env[70020]: value = "task-3618530" [ 931.439610] env[70020]: _type = "Task" [ 931.439610] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.439873] env[70020]: DEBUG oslo_vmware.api [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for the task: (returnval){ [ 931.439873] env[70020]: value = "task-3618529" [ 931.439873] env[70020]: _type = "Task" [ 931.439873] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.442392] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c6c608-b0a9-4aeb-b793-9e8049c06ece {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.479260] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.489129] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e38ba35f-ec7c-4b92-8e82-5abf99429bf5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.505507] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618530, 'name': PowerOnVM_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.505507] env[70020]: DEBUG oslo_vmware.api [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.512710] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 931.512710] env[70020]: value = "task-3618531" [ 931.512710] env[70020]: _type = "Task" [ 931.512710] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.524436] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618531, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.632804] env[70020]: DEBUG oslo_vmware.api [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618520, 'name': PowerOnVM_Task, 'duration_secs': 1.071811} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.633151] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.633404] env[70020]: INFO nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Took 10.83 seconds to spawn the instance on the hypervisor. [ 931.633591] env[70020]: DEBUG nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.634399] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2936da4b-db4f-4a08-9865-f4743b6ca0a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.726709] env[70020]: DEBUG nova.objects.instance [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lazy-loading 'migration_context' on Instance uuid b53f55c1-1867-410c-9c53-f552ff30d697 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.732032] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618527, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.740581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6ca5bd7-49fa-4368-9e8a-1a667abea13b tempest-ServerActionsV293TestJSON-1916911121 tempest-ServerActionsV293TestJSON-1916911121-project-member] Lock "6f2bc97b-0f0a-4f16-b41c-7af96130783f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.239s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.953015] env[70020]: DEBUG nova.network.neutron [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updating instance_info_cache with network_info: [{"id": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "address": "fa:16:3e:e0:89:b7", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1869b01-6e", "ovs_interfaceid": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.968871] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 931.969261] env[70020]: DEBUG oslo_vmware.api [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Task: {'id': task-3618529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.408774} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.973495] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e44f7084-e106-4b83-9b05-1740e530c0e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.977415] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.977415] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.977585] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.977626] env[70020]: INFO nova.compute.manager [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Took 1.19 seconds to destroy the instance on the hypervisor. [ 931.977908] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.978498] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618530, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.978734] env[70020]: DEBUG nova.compute.manager [-] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.978835] env[70020]: DEBUG nova.network.neutron [-] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.990825] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 931.990825] env[70020]: value = "task-3618532" [ 931.990825] env[70020]: _type = "Task" [ 931.990825] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.011029] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618532, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.023909] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.155059] env[70020]: INFO nova.compute.manager [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Took 53.50 seconds to build instance. [ 932.235951] env[70020]: DEBUG oslo_vmware.api [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.5235} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.236666] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.236952] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.237183] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.237412] env[70020]: INFO nova.compute.manager [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Took 2.16 seconds to destroy the instance on the hypervisor. [ 932.237908] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.239061] env[70020]: DEBUG nova.compute.manager [-] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 932.239061] env[70020]: DEBUG nova.network.neutron [-] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 932.459671] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.459984] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance network_info: |[{"id": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "address": "fa:16:3e:e0:89:b7", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1869b01-6e", "ovs_interfaceid": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 932.460293] env[70020]: DEBUG oslo_vmware.api [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618530, 'name': PowerOnVM_Task, 'duration_secs': 0.646275} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.461081] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:89:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1869b01-6eea-468c-ac71-153c8eeda8ca', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.469381] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.469615] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.469811] env[70020]: INFO nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Took 8.90 seconds to spawn the instance on the hypervisor. [ 932.469985] env[70020]: DEBUG nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 932.473046] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.473854] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e4dffb-ab71-48f1-8722-a634278876d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.478390] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be730a32-d92a-4d00-b343-a658b575fa2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.511021] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.511021] env[70020]: value = "task-3618533" [ 932.511021] env[70020]: _type = "Task" [ 932.511021] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.516167] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618532, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.531270] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618531, 'name': ReconfigVM_Task, 'duration_secs': 0.754281} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.535134] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Reconfigured VM instance instance-0000004c to attach disk [datastore2] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.535341] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618533, 'name': CreateVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.535465] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c3a86c1-39a2-4ab7-bab9-93d1c4a61458 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.544470] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 932.544470] env[70020]: value = "task-3618534" [ 932.544470] env[70020]: _type = "Task" [ 932.544470] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.557045] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618534, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.657251] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8c94a7e6-d7d3-491f-bb90-60dd74dc03af tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.568s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.889863] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8a628c-75a3-4c1f-bf72-6133db422f71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.902162] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744b4fd0-4970-4a44-9949-ba0f459f3893 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.940985] env[70020]: DEBUG nova.network.neutron [-] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.945907] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a16c99-1c74-4732-9cc9-e54496f72429 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.950957] env[70020]: DEBUG nova.compute.manager [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Received event network-changed-d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.951153] env[70020]: DEBUG nova.compute.manager [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Refreshing instance network info cache due to event network-changed-d1869b01-6eea-468c-ac71-153c8eeda8ca. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 932.955684] env[70020]: DEBUG oslo_concurrency.lockutils [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] Acquiring lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.955684] env[70020]: DEBUG oslo_concurrency.lockutils [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] Acquired lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.955684] env[70020]: DEBUG nova.network.neutron [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Refreshing network info cache for port d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.962203] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58dfe12-2a96-46e1-8a69-46c1c3d0408f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.982430] env[70020]: DEBUG nova.compute.provider_tree [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.013222] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618532, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.019899] env[70020]: INFO nova.compute.manager [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Took 50.62 seconds to build instance. [ 933.027961] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618533, 'name': CreateVM_Task, 'duration_secs': 0.424131} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.028156] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.029167] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.029465] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.029792] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 933.032040] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3355945b-ad63-4213-a85d-0e8088b822fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.037192] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 933.037192] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5232b977-7094-479c-87d7-4878b9f4ed10" [ 933.037192] env[70020]: _type = "Task" [ 933.037192] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.044321] env[70020]: DEBUG nova.compute.manager [req-9e8e3229-a6df-48fb-a04d-4faaf5cf6d88 req-806c2a96-302d-4962-acfd-1ca0e470890b service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Received event network-vif-deleted-5b0a839b-040e-424a-b8ad-91a46034cde9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.044533] env[70020]: INFO nova.compute.manager [req-9e8e3229-a6df-48fb-a04d-4faaf5cf6d88 req-806c2a96-302d-4962-acfd-1ca0e470890b service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Neutron deleted interface 5b0a839b-040e-424a-b8ad-91a46034cde9; detaching it from the instance and deleting it from the info cache [ 933.044704] env[70020]: DEBUG nova.network.neutron [req-9e8e3229-a6df-48fb-a04d-4faaf5cf6d88 req-806c2a96-302d-4962-acfd-1ca0e470890b service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.059235] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5232b977-7094-479c-87d7-4878b9f4ed10, 'name': SearchDatastore_Task, 'duration_secs': 0.013338} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.059952] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.060294] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.060550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.060697] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.060870] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.061445] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3b67104-32f7-4e49-a3c9-9f7eff537e6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.067960] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618534, 'name': Rename_Task, 'duration_secs': 0.222818} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.068625] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.068911] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-510d2f74-e66d-4d8a-8fb2-63d8511d492a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.076698] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 933.076698] env[70020]: value = "task-3618535" [ 933.076698] env[70020]: _type = "Task" [ 933.076698] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.078109] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.078221] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.083185] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d999c5d9-7c33-4dad-9c8e-7b3cdfdc9ab1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.091342] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618535, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.092422] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 933.092422] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526b0c50-c66e-b577-0d42-85148fc8a2a1" [ 933.092422] env[70020]: _type = "Task" [ 933.092422] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.104814] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526b0c50-c66e-b577-0d42-85148fc8a2a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.161704] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 933.184452] env[70020]: DEBUG nova.network.neutron [-] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.446365] env[70020]: INFO nova.compute.manager [-] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Took 1.47 seconds to deallocate network for instance. [ 933.487068] env[70020]: DEBUG nova.scheduler.client.report [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.518306] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618532, 'name': CloneVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.524709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd5b4c1b-4f3f-4068-95a1-995f4cf0f4c1 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "edef9245-4048-4ea4-90cc-ebed54498d88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.084s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.551970] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79a19531-eb23-4f07-b7ce-f37aaa79a79b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.565237] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7199e126-cf06-458d-a39a-c35b739dd043 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.594395] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618535, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.602621] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526b0c50-c66e-b577-0d42-85148fc8a2a1, 'name': SearchDatastore_Task, 'duration_secs': 0.013732} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.618416] env[70020]: DEBUG nova.compute.manager [req-9e8e3229-a6df-48fb-a04d-4faaf5cf6d88 req-806c2a96-302d-4962-acfd-1ca0e470890b service nova] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Detach interface failed, port_id=5b0a839b-040e-424a-b8ad-91a46034cde9, reason: Instance 8317f386-44d0-4b1b-8590-d0336fafac21 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 933.624229] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89034bca-aa6b-49da-8c62-6c1e1e709feb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.628530] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 933.628530] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529af35a-7bcf-928f-b368-1e3b1b4d4813" [ 933.628530] env[70020]: _type = "Task" [ 933.628530] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.637075] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529af35a-7bcf-928f-b368-1e3b1b4d4813, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.686929] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.689585] env[70020]: INFO nova.compute.manager [-] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Took 1.45 seconds to deallocate network for instance. [ 933.715394] env[70020]: DEBUG nova.network.neutron [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updated VIF entry in instance network info cache for port d1869b01-6eea-468c-ac71-153c8eeda8ca. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 933.716716] env[70020]: DEBUG nova.network.neutron [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updating instance_info_cache with network_info: [{"id": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "address": "fa:16:3e:e0:89:b7", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1869b01-6e", "ovs_interfaceid": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.953368] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.014632] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618532, 'name': CloneVM_Task, 'duration_secs': 1.535591} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.014955] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Created linked-clone VM from snapshot [ 934.015832] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fa9212-a463-4034-b491-f2d68752ade9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.025524] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Uploading image d9b0c957-d1f7-448d-bc65-21d831f5ff6b {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 934.031130] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.058466] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 934.058466] env[70020]: value = "vm-721735" [ 934.058466] env[70020]: _type = "VirtualMachine" [ 934.058466] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 934.058466] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b159f36a-ad21-4b48-b51d-e309348c062f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.068232] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lease: (returnval){ [ 934.068232] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528cb4df-d625-5cee-7f22-48b7238a08c4" [ 934.068232] env[70020]: _type = "HttpNfcLease" [ 934.068232] env[70020]: } obtained for exporting VM: (result){ [ 934.068232] env[70020]: value = "vm-721735" [ 934.068232] env[70020]: _type = "VirtualMachine" [ 934.068232] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 934.068232] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the lease: (returnval){ [ 934.068232] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528cb4df-d625-5cee-7f22-48b7238a08c4" [ 934.068232] env[70020]: _type = "HttpNfcLease" [ 934.068232] env[70020]: } to be ready. 
{{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 934.076878] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.076878] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528cb4df-d625-5cee-7f22-48b7238a08c4" [ 934.076878] env[70020]: _type = "HttpNfcLease" [ 934.076878] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 934.096034] env[70020]: DEBUG oslo_vmware.api [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618535, 'name': PowerOnVM_Task, 'duration_secs': 0.641376} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.096034] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.096034] env[70020]: INFO nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Took 7.63 seconds to spawn the instance on the hypervisor. [ 934.096034] env[70020]: DEBUG nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.096619] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1019845-aaf1-48dc-a9a5-68aff53d04c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.140853] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529af35a-7bcf-928f-b368-1e3b1b4d4813, 'name': SearchDatastore_Task, 'duration_secs': 0.022284} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.141863] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.142152] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.142428] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-801178e8-cd6a-4062-b22f-073b368eb947 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.151786] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 934.151786] env[70020]: value = "task-3618537" [ 934.151786] env[70020]: _type = "Task" [ 934.151786] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.162256] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.199070] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.218385] env[70020]: DEBUG oslo_concurrency.lockutils [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] Releasing lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.218663] env[70020]: DEBUG nova.compute.manager [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Received event network-vif-deleted-2a10027e-1a93-40ca-a079-297eb6af7618 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.218838] env[70020]: INFO nova.compute.manager [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Neutron deleted interface 2a10027e-1a93-40ca-a079-297eb6af7618; detaching it from the instance and deleting it from the info cache [ 934.219008] env[70020]: DEBUG nova.network.neutron [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.509139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.289s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.515928] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.509s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.518319] env[70020]: INFO nova.compute.claims [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.562755] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.575952] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.575952] env[70020]: value = 
"session[528c1535-3daa-a7b0-823d-982a96a72224]528cb4df-d625-5cee-7f22-48b7238a08c4" [ 934.575952] env[70020]: _type = "HttpNfcLease" [ 934.575952] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 934.576254] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 934.576254] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528cb4df-d625-5cee-7f22-48b7238a08c4" [ 934.576254] env[70020]: _type = "HttpNfcLease" [ 934.576254] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 934.577115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd5a3d4-579d-496e-adff-cc5a0b81386d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.586636] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523fa414-5a55-8467-4796-39e7876b765e/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 934.586823] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523fa414-5a55-8467-4796-39e7876b765e/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 934.662756] env[70020]: INFO nova.compute.manager [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Took 46.89 seconds to build instance. [ 934.672141] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618537, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.708094] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-34cab6fe-080a-4a7e-b473-24517b928bff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.729921] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1123d26-ca68-4069-951d-8753f48b05b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.743165] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd31d5e-ba0d-45a8-b8d9-f9684fe895a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.800662] env[70020]: DEBUG nova.compute.manager [req-baec5ec6-e371-4aa2-976d-8e6839f45c85 req-361939e0-0041-4e96-be3f-57227b8328f6 service nova] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Detach interface failed, port_id=2a10027e-1a93-40ca-a079-297eb6af7618, reason: Instance c9ce57f3-f9a2-40aa-b7eb-403840c34304 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 935.164042] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808496} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.164319] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.165069] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.165069] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c81a05b-0461-4679-9561-66e8c4b644ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.168905] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc7cc341-c38d-4d98-81a2-33b9c8e567d0 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "ff4e958d-0068-429f-af76-5e7d4dd147f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.487s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.176180] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 935.176180] env[70020]: value = "task-3618538" [ 935.176180] env[70020]: _type = "Task" [ 935.176180] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.186695] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618538, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.671311] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.687967] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618538, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079023} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.688542] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.691317] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159bad80-2ce1-4486-90ae-a46a6f549ce7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.730035] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.734017] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f93fc9ad-c673-47bd-81fc-ef841ededdc7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.764432] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 935.764432] env[70020]: value = "task-3618539" [ 935.764432] env[70020]: _type = "Task" [ 935.764432] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.772825] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618539, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.075024] env[70020]: INFO nova.compute.manager [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Swapping old allocation on dict_keys(['ee72c483-d9d9-4e62-8f73-e9f24668500d']) held by migration b5628a9f-1bd9-44da-91e8-035e91b65f82 for instance [ 936.109118] env[70020]: DEBUG nova.scheduler.client.report [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Overwriting current allocation {'allocations': {'ee72c483-d9d9-4e62-8f73-e9f24668500d': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 110}}, 'project_id': '051ddf351c534f65be94aef74fb2ff03', 'user_id': 'c32498a6608a43dab8045aef0b3006e7', 'consumer_generation': 1} on consumer b53f55c1-1867-410c-9c53-f552ff30d697 {{(pid=70020) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 936.194746] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.243445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.243644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquired lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.243825] env[70020]: DEBUG nova.network.neutron [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.249570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae09634d-face-4d43-a5ee-4b4e57f8973a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.260771] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcda2be-fa34-4447-adf9-a658d3acc3ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.275783] 
env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618539, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.308842] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43f4238-b060-4d72-98a7-e853348bcda5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.319487] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf64aa26-9979-4041-80cd-5c69d06c0292 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.336706] env[70020]: DEBUG nova.compute.provider_tree [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.778911] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618539, 'name': ReconfigVM_Task, 'duration_secs': 0.575607} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.781997] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.782989] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c3233b3-6a0d-467f-8081-a6e043f20ebb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.791463] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 936.791463] env[70020]: value = "task-3618540" [ 936.791463] env[70020]: _type = "Task" [ 936.791463] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.801217] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618540, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.841350] env[70020]: DEBUG nova.scheduler.client.report [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.009214] env[70020]: DEBUG nova.network.neutron [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [{"id": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "address": "fa:16:3e:c2:87:4b", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac1e36da-5d", "ovs_interfaceid": "ac1e36da-5de5-4451-a9e7-39165ab5f152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.302275] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618540, 'name': Rename_Task, 'duration_secs': 0.262691} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.302589] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.302873] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d97b653c-55b1-45fd-b143-181f299e7bd3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.310880] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 937.310880] env[70020]: value = "task-3618541" [ 937.310880] env[70020]: _type = "Task" [ 937.310880] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.319319] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618541, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.356189] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.837s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.356189] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 937.357291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.897s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.359310] env[70020]: INFO nova.compute.claims [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.512673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Releasing lock "refresh_cache-b53f55c1-1867-410c-9c53-f552ff30d697" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.513169] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.513498] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33643335-98c8-4db2-b5cd-83bfed4ff20c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.521817] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 937.521817] env[70020]: value = "task-3618542" [ 937.521817] env[70020]: _type = "Task" [ 937.521817] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.531859] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618542, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.715714] env[70020]: INFO nova.compute.manager [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Rebuilding instance [ 937.776012] env[70020]: DEBUG nova.compute.manager [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.777168] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b3ce93-29d2-4d5b-a15d-26ddb2825b1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.824228] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618541, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.865563] env[70020]: DEBUG nova.compute.utils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 937.868674] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 937.868848] env[70020]: DEBUG nova.network.neutron [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 937.924062] env[70020]: DEBUG nova.policy [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 938.034254] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618542, 'name': PowerOffVM_Task, 'duration_secs': 0.299208} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.034397] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.035025] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T23:03:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8fe80bc1-98b9-4377-a5a8-72095e677071',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-537589333',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.035405] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.035405] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.035580] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.035728] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.035877] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.036106] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.036270] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 
tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 938.036440] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.036683] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.036771] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.042285] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6054d614-3db3-4933-a808-0453632b4c17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.059972] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 938.059972] env[70020]: value = "task-3618543" [ 938.059972] env[70020]: _type = "Task" [ 938.059972] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.069622] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618543, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.213154] env[70020]: DEBUG nova.network.neutron [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Successfully created port: 4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.323274] env[70020]: DEBUG oslo_vmware.api [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618541, 'name': PowerOnVM_Task, 'duration_secs': 0.782287} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.323666] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.323800] env[70020]: INFO nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Took 9.06 seconds to spawn the instance on the hypervisor. [ 938.323979] env[70020]: DEBUG nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.324808] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8712498-b67a-4b92-8cc2-3305dc0fb595 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.371351] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 938.575992] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618543, 'name': ReconfigVM_Task, 'duration_secs': 0.280532} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.576893] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1be383-de99-43c1-88c3-80e9875bd771 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.601179] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T23:03:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='8fe80bc1-98b9-4377-a5a8-72095e677071',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-537589333',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.601470] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.601626] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.601807] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.601953] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.602164] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.602346] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.602505] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 938.602680] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.602860] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.604027] env[70020]: DEBUG nova.virt.hardware [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.604027] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21a3a7d0-b1d1-428a-a2b6-35f294c1de6f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.612283] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 938.612283] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52298003-6510-28fa-7568-074ac1b0fec2" [ 938.612283] env[70020]: _type = "Task" [ 938.612283] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.625045] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52298003-6510-28fa-7568-074ac1b0fec2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.793278] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.793674] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-896b2a63-7b67-4eba-8914-ccbda4670de5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.802615] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 938.802615] env[70020]: value = "task-3618544" [ 938.802615] env[70020]: _type = "Task" [ 938.802615] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.812638] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.849031] env[70020]: INFO nova.compute.manager [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Took 47.96 seconds to build instance. [ 939.021978] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b3307d-d1b0-46ab-aeea-bbd075692f11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.029964] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9bbaf7-125c-4939-b58c-9c38b607714a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.063971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.064285] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.064471] env[70020]: DEBUG nova.compute.manager [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.066291] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f770034-e484-45fd-9eba-246adac76149 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.069402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0266f5e-33d7-4db5-9061-5e6c4e1b92be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.077149] env[70020]: DEBUG nova.compute.manager [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 939.077784] env[70020]: DEBUG nova.objects.instance [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lazy-loading 'flavor' on Instance uuid 45926a02-d0fe-4274-ba47-b97b3e12e4cd {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.082845] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5af163-d3f4-43ec-811c-779149f0b9c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.100036] env[70020]: DEBUG nova.compute.provider_tree [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.126372] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52298003-6510-28fa-7568-074ac1b0fec2, 'name': SearchDatastore_Task, 'duration_secs': 0.017118} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.131813] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfiguring VM instance instance-00000038 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 939.132427] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecd52b39-924a-42d0-bdd5-4c3008310e14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.152691] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 939.152691] env[70020]: value = "task-3618545" [ 939.152691] env[70020]: _type = "Task" [ 939.152691] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.163333] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618545, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.313871] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618544, 'name': PowerOffVM_Task, 'duration_secs': 0.220457} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.314173] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.314887] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.315858] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9864b35-2466-4d0a-9fd3-62a45579714d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.325046] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.325210] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f85371e-ce29-4d34-ae29-8108f51ef436 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.350916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68f7e086-feb1-4b0e-b967-c09b34bcf1db tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "3a4f2342-58e7-436b-a779-0fa093b52409" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.024s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.357984] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.358240] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.358425] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Deleting the datastore file [datastore2] ff4e958d-0068-429f-af76-5e7d4dd147f3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.358708] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39b60df3-c0c7-4e0b-bf43-c23b3ebdedec {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.366827] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 939.366827] env[70020]: value = "task-3618547" [ 939.366827] env[70020]: _type = "Task" [ 939.366827] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.376422] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.383797] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 939.409124] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.409378] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.409565] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.409817] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.411017] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.411017] env[70020]: DEBUG 
nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.411017] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.411017] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.411017] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.411418] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.411418] env[70020]: DEBUG nova.virt.hardware [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.412348] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1577013-22bd-44df-806c-d075bd4dfeff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.422318] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ccfb13-2d47-44ec-876d-56cb73597be3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.603827] env[70020]: DEBUG nova.scheduler.client.report [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.666163] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618545, 
'name': ReconfigVM_Task, 'duration_secs': 0.235607} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.666480] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfigured VM instance instance-00000038 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 939.667299] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c0a8bf-4a1f-47a6-b01b-dc0127a83fdf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.691902] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.692627] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bbcef57-f751-4ac4-a0c1-41694e661e16 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.713204] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 939.713204] env[70020]: value = "task-3618548" [ 939.713204] env[70020]: _type = "Task" [ 939.713204] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.717545] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "4335f92a-897a-4779-be70-4f0754a66d53" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.717862] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "4335f92a-897a-4779-be70-4f0754a66d53" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.718096] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "4335f92a-897a-4779-be70-4f0754a66d53-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.718289] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "4335f92a-897a-4779-be70-4f0754a66d53-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.718459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "4335f92a-897a-4779-be70-4f0754a66d53-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.723246] env[70020]: INFO nova.compute.manager [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Terminating instance [ 939.729024] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618548, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.781508] env[70020]: DEBUG nova.network.neutron [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Successfully updated port: 4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 939.878756] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128912} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.879467] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.879775] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.880083] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.960956] env[70020]: DEBUG nova.compute.manager [req-3a744217-7c08-4565-82cc-814595a50c56 req-7856693d-9e1e-47f6-ace4-0ac5a29bad42 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Received event network-vif-plugged-4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.961207] env[70020]: DEBUG oslo_concurrency.lockutils [req-3a744217-7c08-4565-82cc-814595a50c56 req-7856693d-9e1e-47f6-ace4-0ac5a29bad42 service nova] Acquiring lock "9dec24d6-af8a-41b9-920c-e4420fc69417-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.961443] env[70020]: DEBUG oslo_concurrency.lockutils [req-3a744217-7c08-4565-82cc-814595a50c56 req-7856693d-9e1e-47f6-ace4-0ac5a29bad42 service nova] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.961635] env[70020]: DEBUG oslo_concurrency.lockutils [req-3a744217-7c08-4565-82cc-814595a50c56 req-7856693d-9e1e-47f6-ace4-0ac5a29bad42 service nova] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.961881] env[70020]: DEBUG 
nova.compute.manager [req-3a744217-7c08-4565-82cc-814595a50c56 req-7856693d-9e1e-47f6-ace4-0ac5a29bad42 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] No waiting events found dispatching network-vif-plugged-4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.963452] env[70020]: WARNING nova.compute.manager [req-3a744217-7c08-4565-82cc-814595a50c56 req-7856693d-9e1e-47f6-ace4-0ac5a29bad42 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Received unexpected event network-vif-plugged-4a49418e-b633-42e9-b84c-6a2ece113e59 for instance with vm_state building and task_state spawning. [ 940.087267] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.087610] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8fa53cd-d9b4-40dd-b2a1-3e4aa0a79032 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.096047] env[70020]: DEBUG oslo_vmware.api [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 940.096047] env[70020]: value = "task-3618549" [ 940.096047] env[70020]: _type = "Task" [ 940.096047] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.106471] env[70020]: DEBUG oslo_vmware.api [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.112031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.112031] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 940.112031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.413s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.112031] env[70020]: DEBUG nova.objects.instance [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lazy-loading 'resources' on Instance uuid 422ca332-5952-443c-a22e-67b1b45df5b9 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.226012] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618548, 'name': ReconfigVM_Task, 'duration_secs': 0.483075} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.226382] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Reconfigured VM instance instance-00000038 to attach disk [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697/b53f55c1-1867-410c-9c53-f552ff30d697.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.227412] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf179701-5d18-4cc2-9485-044fb2352070 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.232459] env[70020]: DEBUG nova.compute.manager [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.232681] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.233673] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b5232c-30ab-4a7b-ac6d-e8b16c4088ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.255420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e1da09-9cd7-4009-932a-078d7afdbf0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.260309] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.260944] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-854e219b-c7ef-4b94-9914-ce0f59ed7b58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.284401] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56352b76-91e3-4c19-b444-4d0baab9f627 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.287637] env[70020]: DEBUG oslo_vmware.api [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 940.287637] env[70020]: value = "task-3618550" [ 940.287637] env[70020]: _type = "Task" [ 940.287637] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.288233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-9dec24d6-af8a-41b9-920c-e4420fc69417" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.288883] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-9dec24d6-af8a-41b9-920c-e4420fc69417" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.288883] env[70020]: DEBUG nova.network.neutron [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.318055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2fc720-5cda-4f45-aee0-e24bb0de99b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.324986] env[70020]: DEBUG oslo_vmware.api [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618550, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.330892] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.331217] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee6a9432-e863-4075-8a54-e5254fc28c5e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.342749] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 940.342749] env[70020]: value = "task-3618551" [ 940.342749] env[70020]: _type = "Task" [ 940.342749] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.350731] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.608136] env[70020]: DEBUG oslo_vmware.api [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618549, 'name': PowerOffVM_Task, 'duration_secs': 0.240536} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.608136] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.608136] env[70020]: DEBUG nova.compute.manager [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.608656] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193bd3dc-c7af-4500-9609-8dde1c0ea254 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.619119] env[70020]: DEBUG nova.compute.utils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 940.622621] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 940.622798] env[70020]: DEBUG nova.network.neutron [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 940.673241] env[70020]: DEBUG nova.policy [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '63b30465e7ae445182c69465b29a7bc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb58ecf407874509b7072d5d2cf838a4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 940.756038] env[70020]: DEBUG nova.compute.manager [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Received event network-changed-d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.756038] env[70020]: DEBUG nova.compute.manager [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Refreshing instance network info cache due to 
event network-changed-d1869b01-6eea-468c-ac71-153c8eeda8ca. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 940.756210] env[70020]: DEBUG oslo_concurrency.lockutils [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] Acquiring lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.757219] env[70020]: DEBUG oslo_concurrency.lockutils [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] Acquired lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.757219] env[70020]: DEBUG nova.network.neutron [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Refreshing network info cache for port d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.806758] env[70020]: DEBUG oslo_vmware.api [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618550, 'name': PowerOffVM_Task, 'duration_secs': 0.270571} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.807081] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.807382] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.807549] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc9a6cdc-d39f-4e70-95e5-856bdc10e20a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.853769] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618551, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.857953] env[70020]: DEBUG nova.network.neutron [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.897482] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.897821] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.897911] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Deleting the datastore file [datastore2] 4335f92a-897a-4779-be70-4f0754a66d53 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.898673] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1131289-3578-41f1-aa5a-ede40a639dd7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.907467] env[70020]: DEBUG oslo_vmware.api [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 940.907467] env[70020]: value = "task-3618553" [ 940.907467] env[70020]: _type = "Task" [ 940.907467] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.919635] env[70020]: DEBUG oslo_vmware.api [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618553, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.939769] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 940.940143] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 940.940325] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 940.941883] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 940.941883] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 940.941883] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 940.941883] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 940.942271] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 940.942271] env[70020]: DEBUG 
nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 940.942418] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 940.942584] env[70020]: DEBUG nova.virt.hardware [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 940.943689] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a368fc-25bd-4205-a60c-71bdb1835ec1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.958701] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be0d0b6-afcc-4e71-a9c2-0846cd8b3728 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.982211] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.987259] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.990704] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.991405] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c221db45-5091-48b5-8222-4d2a5957a659 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.037382] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.037382] env[70020]: value = "task-3618554" [ 941.037382] env[70020]: _type = "Task" [ 941.037382] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.045202] env[70020]: DEBUG nova.network.neutron [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Successfully created port: 85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.055386] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618554, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.124726] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 941.131175] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d3ddfa4-ebec-4a4c-b77a-14e2a0a9fd5e tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.147053] env[70020]: DEBUG nova.network.neutron [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Updating instance_info_cache with network_info: [{"id": "4a49418e-b633-42e9-b84c-6a2ece113e59", "address": "fa:16:3e:f3:05:c6", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a49418e-b6", "ovs_interfaceid": "4a49418e-b633-42e9-b84c-6a2ece113e59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.264264] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208c7d4a-3569-475b-991c-1c68ca251879 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.276639] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-115e8691-2812-498f-8600-e8d741b7af73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.340540] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86614bf5-379e-4cc6-864c-3d87454b0c74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.361996] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed678ed-942b-4697-b8fd-4c619b36a3a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.368968] env[70020]: DEBUG oslo_vmware.api [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618551, 'name': PowerOnVM_Task, 'duration_secs': 0.525119} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.369469] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.387114] env[70020]: DEBUG nova.compute.provider_tree [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.420620] env[70020]: DEBUG oslo_vmware.api [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246327} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.420876] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.421068] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.421303] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.421488] env[70020]: INFO nova.compute.manager [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Took 1.19 seconds to destroy the instance on the hypervisor. [ 941.421719] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.421900] env[70020]: DEBUG nova.compute.manager [-] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.421988] env[70020]: DEBUG nova.network.neutron [-] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.548690] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618554, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.651071] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-9dec24d6-af8a-41b9-920c-e4420fc69417" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.651071] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Instance network_info: |[{"id": "4a49418e-b633-42e9-b84c-6a2ece113e59", "address": "fa:16:3e:f3:05:c6", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a49418e-b6", "ovs_interfaceid": "4a49418e-b633-42e9-b84c-6a2ece113e59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 941.651071] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:05:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a49418e-b633-42e9-b84c-6a2ece113e59', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.659406] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.659759] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.660016] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce37c90a-2e19-4991-9037-0cfebecce038 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.675237] env[70020]: DEBUG nova.network.neutron [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updated VIF entry in instance network info cache for port d1869b01-6eea-468c-ac71-153c8eeda8ca. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.675616] env[70020]: DEBUG nova.network.neutron [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updating instance_info_cache with network_info: [{"id": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "address": "fa:16:3e:e0:89:b7", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1869b01-6e", "ovs_interfaceid": "d1869b01-6eea-468c-ac71-153c8eeda8ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.685983] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.685983] env[70020]: value = "task-3618555" [ 941.685983] env[70020]: _type = "Task" [ 941.685983] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.696875] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618555, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.891590] env[70020]: DEBUG nova.scheduler.client.report [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.049963] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618554, 'name': CreateVM_Task, 'duration_secs': 0.588314} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.053034] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.053034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.053034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.053034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.053034] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0b5e8e9-93b4-4f48-88dd-9f3382c9df35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.062687] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 942.062687] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522606cf-c7fe-2b2b-263b-415ce63d6875" [ 942.062687] env[70020]: _type = "Task" [ 942.062687] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.073443] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522606cf-c7fe-2b2b-263b-415ce63d6875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.134685] env[70020]: DEBUG nova.compute.manager [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Received event network-changed-4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.134904] env[70020]: DEBUG nova.compute.manager [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Refreshing instance network info cache due to event network-changed-4a49418e-b633-42e9-b84c-6a2ece113e59. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 942.135139] env[70020]: DEBUG oslo_concurrency.lockutils [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] Acquiring lock "refresh_cache-9dec24d6-af8a-41b9-920c-e4420fc69417" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.135283] env[70020]: DEBUG oslo_concurrency.lockutils [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] Acquired lock "refresh_cache-9dec24d6-af8a-41b9-920c-e4420fc69417" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.135440] env[70020]: DEBUG nova.network.neutron [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Refreshing network info cache for port 4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.137819] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 942.166704] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.166912] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.167295] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.167434] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.167503] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.167590] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.167796] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.167955] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.168145] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.168354] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.168471] env[70020]: DEBUG nova.virt.hardware [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.169475] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18321f51-635d-48df-8269-34561d36942d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.179008] env[70020]: DEBUG nova.network.neutron [-] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.180350] env[70020]: DEBUG oslo_concurrency.lockutils [req-5a126bea-3464-4a42-8371-011dd0ac08a9 req-47862750-c169-44be-8c34-d81a84713543 service nova] Releasing lock "refresh_cache-3a4f2342-58e7-436b-a779-0fa093b52409" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.181960] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb54412-ae04-4233-9a3b-877f8e8e8f74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.208554] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618555, 'name': CreateVM_Task, 'duration_secs': 0.464091} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.208755] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.210330] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.384026] env[70020]: INFO nova.compute.manager [None req-c8fe72b9-8ae5-4786-88d0-25683afdcaa2 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance to original state: 'active' [ 942.398121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.285s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.400411] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.928s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.400411] env[70020]: DEBUG nova.objects.instance [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lazy-loading 'resources' on Instance uuid 00232eca-da03-49ea-b62b-d9721739b0ec {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.438183] env[70020]: INFO nova.scheduler.client.report [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Deleted allocations for instance 422ca332-5952-443c-a22e-67b1b45df5b9 [ 942.576400] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522606cf-c7fe-2b2b-263b-415ce63d6875, 'name': SearchDatastore_Task, 'duration_secs': 0.017329} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.577013] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.577407] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.578087] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.578486] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.579128] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.579409] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.579835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.580230] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95908be8-7b64-424c-9ba0-c75b15065e72 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.584027] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39e96888-2f32-4899-8a92-b7cc0e3541f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.591375] env[70020]: DEBUG oslo_vmware.api [None 
req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 942.591375] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526c53b5-bfdc-1157-193c-a13ba305c769" [ 942.591375] env[70020]: _type = "Task" [ 942.591375] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.597617] env[70020]: DEBUG nova.network.neutron [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Successfully updated port: 85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.600458] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.600768] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.602415] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e730077-4d83-4611-85ca-c72ada545dc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.612215] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526c53b5-bfdc-1157-193c-a13ba305c769, 'name': SearchDatastore_Task, 'duration_secs': 0.015096} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.613958] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.614645] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.614645] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.620415] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 942.620415] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e835dc-5930-5f51-631c-b85d498c6acb" [ 942.620415] env[70020]: _type = "Task" [ 942.620415] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.631886] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e835dc-5930-5f51-631c-b85d498c6acb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.689233] env[70020]: INFO nova.compute.manager [-] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Took 1.27 seconds to deallocate network for instance. 
[ 942.694739] env[70020]: DEBUG nova.objects.instance [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lazy-loading 'flavor' on Instance uuid 45926a02-d0fe-4274-ba47-b97b3e12e4cd {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.952857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7b0ce31a-b84b-4f93-8233-a2279c30bdf5 tempest-ServerRescueTestJSONUnderV235-1838383190 tempest-ServerRescueTestJSONUnderV235-1838383190-project-member] Lock "422ca332-5952-443c-a22e-67b1b45df5b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.183s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.968385] env[70020]: DEBUG nova.network.neutron [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Updated VIF entry in instance network info cache for port 4a49418e-b633-42e9-b84c-6a2ece113e59. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.968385] env[70020]: DEBUG nova.network.neutron [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Updating instance_info_cache with network_info: [{"id": "4a49418e-b633-42e9-b84c-6a2ece113e59", "address": "fa:16:3e:f3:05:c6", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a49418e-b6", "ovs_interfaceid": "4a49418e-b633-42e9-b84c-6a2ece113e59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.115086] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "refresh_cache-abc194e3-fb6a-4f2a-8886-e2777530a2a3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.115086] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquired lock "refresh_cache-abc194e3-fb6a-4f2a-8886-e2777530a2a3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.115086] 
env[70020]: DEBUG nova.network.neutron [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.134818] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e835dc-5930-5f51-631c-b85d498c6acb, 'name': SearchDatastore_Task, 'duration_secs': 0.021154} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.140118] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b738484-c22d-4dd5-a6e7-4bde8d348ba5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.147183] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 943.147183] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525268a7-9ad4-51a9-6508-afae826adc00" [ 943.147183] env[70020]: _type = "Task" [ 943.147183] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.162353] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525268a7-9ad4-51a9-6508-afae826adc00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.195411] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.204379] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.204682] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquired lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.204828] env[70020]: DEBUG nova.network.neutron [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.205423] env[70020]: DEBUG nova.objects.instance [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lazy-loading 'info_cache' on Instance uuid 45926a02-d0fe-4274-ba47-b97b3e12e4cd {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.443102] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28017936-f361-4aa0-850d-1f17386d7a3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.451835] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3e6309-8deb-4d69-9fdd-7f3dd20e9a63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.483667] env[70020]: DEBUG oslo_concurrency.lockutils [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] Releasing lock "refresh_cache-9dec24d6-af8a-41b9-920c-e4420fc69417" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.483978] env[70020]: DEBUG nova.compute.manager [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Received event network-vif-deleted-089cfbb7-4a17-4371-949f-06f761b4c32b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.484171] env[70020]: INFO nova.compute.manager [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Neutron deleted interface 089cfbb7-4a17-4371-949f-06f761b4c32b; detaching it from the instance and 
deleting it from the info cache [ 943.484490] env[70020]: DEBUG nova.network.neutron [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.489200] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04a56bf-2383-4e81-934a-419a54abcc61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.495908] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6fbe3a-ecc0-4fc8-9cc4-cbae2bef16e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.514036] env[70020]: DEBUG nova.compute.provider_tree [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.596333] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523fa414-5a55-8467-4796-39e7876b765e/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 943.597246] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1df111b-d732-4ec5-a5e8-91a40196092a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.604655] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523fa414-5a55-8467-4796-39e7876b765e/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 943.604825] env[70020]: ERROR oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523fa414-5a55-8467-4796-39e7876b765e/disk-0.vmdk due to incomplete transfer. [ 943.605062] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8714f87f-cf1a-4fa3-9d6c-d46c10817034 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.615435] env[70020]: DEBUG oslo_vmware.rw_handles [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523fa414-5a55-8467-4796-39e7876b765e/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 943.615655] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Uploaded image d9b0c957-d1f7-448d-bc65-21d831f5ff6b to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 943.618231] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 943.620312] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0155f5c4-91bc-4d4c-b039-17615d711845 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.630702] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 943.630702] env[70020]: value = "task-3618556" [ 943.630702] env[70020]: _type = "Task" [ 943.630702] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.637484] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618556, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.659128] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525268a7-9ad4-51a9-6508-afae826adc00, 'name': SearchDatastore_Task, 'duration_secs': 0.011757} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.659569] env[70020]: DEBUG nova.network.neutron [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.662107] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.662412] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.662783] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.663020] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.663277] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ac6f52a-e9ea-42ce-9bc7-d246da117a9c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.666145] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08dd239c-523b-493a-aaad-d2055d033ee9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.676211] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 943.676211] env[70020]: value = "task-3618557" [ 943.676211] env[70020]: _type = "Task" [ 943.676211] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.682196] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.682794] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.689021] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ed4768a-dd90-46dc-8f2d-8bc66a9d92cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.693804] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618557, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.697650] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 943.697650] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5272dd75-e42f-8835-d3fa-73d431ca7a94" [ 943.697650] env[70020]: _type = "Task" [ 943.697650] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.707423] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5272dd75-e42f-8835-d3fa-73d431ca7a94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.708625] env[70020]: DEBUG nova.objects.base [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Object Instance<45926a02-d0fe-4274-ba47-b97b3e12e4cd> lazy-loaded attributes: flavor,info_cache {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 943.855701] env[70020]: DEBUG nova.network.neutron [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Updating instance_info_cache with network_info: [{"id": "85556b4a-7248-4583-8362-b471b8b0abbe", "address": "fa:16:3e:d9:fd:8b", "network": {"id": "67717fa4-6902-4ab3-b920-66154dce21da", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-455223479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb58ecf407874509b7072d5d2cf838a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85556b4a-72", "ovs_interfaceid": "85556b4a-7248-4583-8362-b471b8b0abbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.987830] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8df4cd9-daf1-4a0f-9ef5-b705e3394de3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.999457] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f8aae7-1a90-42ca-8374-e50fa5add3aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.016813] env[70020]: DEBUG nova.scheduler.client.report [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.051012] env[70020]: DEBUG nova.compute.manager [req-d11495a7-0955-4330-8c36-9a616bbdeb0c req-2f4b1ec5-0d72-4bda-a425-f724523fbed7 service nova] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Detach interface failed, port_id=089cfbb7-4a17-4371-949f-06f761b4c32b, reason: Instance 4335f92a-897a-4779-be70-4f0754a66d53 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 944.142530] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618556, 'name': Destroy_Task, 'duration_secs': 0.39397} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.142873] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Destroyed the VM [ 944.143083] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 944.143365] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6e0a9717-83c2-4271-bcf3-4c6698d3aeff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.157387] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 944.157387] env[70020]: value = "task-3618558" [ 944.157387] env[70020]: _type = "Task" [ 944.157387] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.170465] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618558, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.193700] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618557, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.211327] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5272dd75-e42f-8835-d3fa-73d431ca7a94, 'name': SearchDatastore_Task, 'duration_secs': 0.015005} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.214785] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfa298d7-b138-4c71-bb4e-2bf1be602f75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.221407] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 944.221407] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52327be8-c9f5-5f89-f9cb-b9f29a64f3e2" [ 944.221407] env[70020]: _type = "Task" [ 944.221407] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.230906] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52327be8-c9f5-5f89-f9cb-b9f29a64f3e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.237465] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "b53f55c1-1867-410c-9c53-f552ff30d697" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.237714] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.237915] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.238109] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.238280] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.240627] env[70020]: INFO nova.compute.manager [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Terminating instance [ 944.358454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Releasing lock "refresh_cache-abc194e3-fb6a-4f2a-8886-e2777530a2a3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.359016] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Instance network_info: |[{"id": "85556b4a-7248-4583-8362-b471b8b0abbe", "address": "fa:16:3e:d9:fd:8b", "network": {"id": "67717fa4-6902-4ab3-b920-66154dce21da", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-455223479-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb58ecf407874509b7072d5d2cf838a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85556b4a-72", "ovs_interfaceid": "85556b4a-7248-4583-8362-b471b8b0abbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 944.359485] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:fd:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85556b4a-7248-4583-8362-b471b8b0abbe', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.369889] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Creating folder: Project (cb58ecf407874509b7072d5d2cf838a4). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 944.376834] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-080d972a-b810-47a0-b8af-188f0088e0a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.394675] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Created folder: Project (cb58ecf407874509b7072d5d2cf838a4) in parent group-v721521. [ 944.394675] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Creating folder: Instances. Parent ref: group-v721739. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 944.394675] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ce05dbb-2aec-4660-8dfa-05853ca8496c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.404562] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Created folder: Instances in parent group-v721739. [ 944.404830] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 944.405053] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.405302] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7294681-a579-4487-895a-3bec9139d836 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.427822] env[70020]: DEBUG nova.compute.manager [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Received event network-vif-plugged-85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.428035] env[70020]: DEBUG oslo_concurrency.lockutils [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] Acquiring lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.428254] env[70020]: DEBUG oslo_concurrency.lockutils [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.428415] env[70020]: DEBUG oslo_concurrency.lockutils [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.428578] env[70020]: DEBUG nova.compute.manager [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] No waiting events found dispatching network-vif-plugged-85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.428731] env[70020]: WARNING nova.compute.manager 
[req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Received unexpected event network-vif-plugged-85556b4a-7248-4583-8362-b471b8b0abbe for instance with vm_state building and task_state spawning. [ 944.428874] env[70020]: DEBUG nova.compute.manager [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Received event network-changed-85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.429036] env[70020]: DEBUG nova.compute.manager [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Refreshing instance network info cache due to event network-changed-85556b4a-7248-4583-8362-b471b8b0abbe. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 944.429215] env[70020]: DEBUG oslo_concurrency.lockutils [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] Acquiring lock "refresh_cache-abc194e3-fb6a-4f2a-8886-e2777530a2a3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.429441] env[70020]: DEBUG oslo_concurrency.lockutils [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] Acquired lock "refresh_cache-abc194e3-fb6a-4f2a-8886-e2777530a2a3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.429603] env[70020]: DEBUG nova.network.neutron [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Refreshing network info cache for port 85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.439901] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.439901] env[70020]: value = "task-3618561" [ 944.439901] env[70020]: _type = "Task" [ 944.439901] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.451385] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618561, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.500704] env[70020]: DEBUG nova.network.neutron [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Updating instance_info_cache with network_info: [{"id": "7648a826-e268-4333-96ce-f336ff254b66", "address": "fa:16:3e:99:29:9b", "network": {"id": "28d7e64a-7917-46ed-91bf-c239aad81e05", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-285076897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f14ea4f517a04de69f8bc56a19f2be8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7648a826-e2", "ovs_interfaceid": "7648a826-e268-4333-96ce-f336ff254b66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.522733] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.123s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.526056] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.287s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.526809] env[70020]: DEBUG nova.objects.instance [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lazy-loading 'resources' on Instance uuid 8bff6907-c2b0-4ad1-9298-b2d622d33fde {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.551151] env[70020]: INFO nova.scheduler.client.report [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Deleted allocations for instance 00232eca-da03-49ea-b62b-d9721739b0ec [ 944.669780] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618558, 'name': RemoveSnapshot_Task, 'duration_secs': 0.382252} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.670062] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 944.670342] env[70020]: DEBUG nova.compute.manager [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 944.671149] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c1f75e-6df3-4c2b-a699-a8f70c46d2bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.695563] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59691} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.695563] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.695860] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.696147] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34e03622-3108-493a-9b54-df18f3185af1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.709698] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 944.709698] env[70020]: value = "task-3618562" [ 944.709698] env[70020]: _type = "Task" [ 944.709698] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.720763] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618562, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.732407] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52327be8-c9f5-5f89-f9cb-b9f29a64f3e2, 'name': SearchDatastore_Task, 'duration_secs': 0.014917} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.732670] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.732945] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 9dec24d6-af8a-41b9-920c-e4420fc69417/9dec24d6-af8a-41b9-920c-e4420fc69417.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.733480] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08dd5b23-18b5-4050-9976-390308a83542 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.744498] env[70020]: DEBUG nova.compute.manager [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 944.744818] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.746349] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c7e75f-6ca8-4652-840c-a79eab03cf42 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.749798] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 944.749798] env[70020]: value = "task-3618563" [ 944.749798] env[70020]: _type = "Task" [ 944.749798] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.756954] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.757878] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94d9ef66-80e7-4d8c-b828-8205dcdcf78b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.764094] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618563, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.771436] env[70020]: DEBUG oslo_vmware.api [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 944.771436] env[70020]: value = "task-3618564" [ 944.771436] env[70020]: _type = "Task" [ 944.771436] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.784124] env[70020]: DEBUG oslo_vmware.api [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618564, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.952062] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618561, 'name': CreateVM_Task, 'duration_secs': 0.408762} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.952264] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.953117] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.953191] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.954114] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 944.954405] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abb194de-65ca-4c35-8dd9-380a8113c28c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.962858] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 944.962858] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528c980c-8977-ddad-e8dc-c4346cfb1ad9" [ 944.962858] env[70020]: _type = "Task" [ 944.962858] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.975707] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528c980c-8977-ddad-e8dc-c4346cfb1ad9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.009849] env[70020]: DEBUG oslo_concurrency.lockutils [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Releasing lock "refresh_cache-45926a02-d0fe-4274-ba47-b97b3e12e4cd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.060083] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be6f6e60-270c-4235-834e-ddbca95a8ef0 tempest-ServersTestMultiNic-1935476789 tempest-ServersTestMultiNic-1935476789-project-member] Lock "00232eca-da03-49ea-b62b-d9721739b0ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.298s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.195377] env[70020]: INFO nova.compute.manager [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Shelve offloading [ 945.229156] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104954} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.229939] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.231430] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1869a8b1-5907-4f10-a0b3-bd41692dbd5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.254069] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.257411] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46df58dc-bea3-4041-b82d-c9a2a6a77741 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.291107] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 945.291107] env[70020]: value = "task-3618565" [ 945.291107] env[70020]: _type = "Task" [ 945.291107] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.291107] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618563, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528878} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.291594] env[70020]: DEBUG oslo_vmware.api [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618564, 'name': PowerOffVM_Task, 'duration_secs': 0.269696} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.296846] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 9dec24d6-af8a-41b9-920c-e4420fc69417/9dec24d6-af8a-41b9-920c-e4420fc69417.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 945.297099] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.297356] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.297514] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.300941] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3147ec31-e19a-4cc5-b77c-c050df9ddbde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.303096] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-556db4aa-4ac4-4cdd-8ff6-aa68297a8399 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.312316] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618565, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.313794] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 945.313794] env[70020]: value = "task-3618567" [ 945.313794] env[70020]: _type = "Task" [ 945.313794] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.325662] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618567, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.356247] env[70020]: DEBUG nova.network.neutron [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Updated VIF entry in instance network info cache for port 85556b4a-7248-4583-8362-b471b8b0abbe. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.356618] env[70020]: DEBUG nova.network.neutron [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Updating instance_info_cache with network_info: [{"id": "85556b4a-7248-4583-8362-b471b8b0abbe", "address": "fa:16:3e:d9:fd:8b", "network": {"id": "67717fa4-6902-4ab3-b920-66154dce21da", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-455223479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb58ecf407874509b7072d5d2cf838a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85556b4a-72", "ovs_interfaceid": "85556b4a-7248-4583-8362-b471b8b0abbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.385791] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.386096] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
945.386285] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleting the datastore file [datastore1] b53f55c1-1867-410c-9c53-f552ff30d697 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.386586] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b519ceec-6423-4be5-9163-4f2dfe12b48f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.394297] env[70020]: DEBUG oslo_vmware.api [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 945.394297] env[70020]: value = "task-3618568" [ 945.394297] env[70020]: _type = "Task" [ 945.394297] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.409193] env[70020]: DEBUG oslo_vmware.api [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.473784] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528c980c-8977-ddad-e8dc-c4346cfb1ad9, 'name': SearchDatastore_Task, 'duration_secs': 0.074921} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.474114] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.474396] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.474610] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.474968] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.474968] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.475221] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7d42390-8474-40b9-8737-3f748b0400ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.491806] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.492198] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.492832] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc594547-417c-44d9-9b31-4231fdbe633e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.499619] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 945.499619] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528f02d6-78a1-e8e5-aafc-75c5a8d6e69e" [ 945.499619] env[70020]: _type = "Task" [ 945.499619] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.512514] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528f02d6-78a1-e8e5-aafc-75c5a8d6e69e, 'name': SearchDatastore_Task, 'duration_secs': 0.010389} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.518746] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-552e4c6d-4380-448a-9fe2-67d6ea1ed6ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.523449] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 945.523449] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52720c9a-48ec-8bf0-269b-509c653848bd" [ 945.523449] env[70020]: _type = "Task" [ 945.523449] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.533861] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52720c9a-48ec-8bf0-269b-509c653848bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.628922] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104f3ab4-7d76-44ed-b237-3117f8ec1100 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.639259] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9cb0aa-67e5-4913-a97c-c748c57c8537 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.674682] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e8d07b-48ec-45b6-969e-af78029f39c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.683418] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4731811e-c064-43fb-861b-9964874db3e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.699606] env[70020]: DEBUG nova.compute.provider_tree [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.704746] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.705035] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68f0b597-941b-4f3f-ba37-15aa31dba872 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.713368] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 945.713368] env[70020]: value = "task-3618569" [ 945.713368] env[70020]: _type = "Task" [ 945.713368] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.722837] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618569, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.807430] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618565, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.825040] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.159482} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.825040] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.825624] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f31f83-91ae-4576-91f3-5480517fcb5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.850391] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 9dec24d6-af8a-41b9-920c-e4420fc69417/9dec24d6-af8a-41b9-920c-e4420fc69417.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.850739] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6d8d30d-2fcf-4003-b7d1-4cb65372c864 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.866102] env[70020]: DEBUG oslo_concurrency.lockutils [req-37f2ca58-04d5-4fa1-bc9d-5c3d51297eaa req-7425d7af-2f35-4566-8412-d9d04bf81700 service nova] Releasing lock "refresh_cache-abc194e3-fb6a-4f2a-8886-e2777530a2a3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.874693] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 945.874693] env[70020]: value = "task-3618570" [ 945.874693] env[70020]: _type = "Task" [ 945.874693] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.887305] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618570, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.903921] env[70020]: DEBUG oslo_vmware.api [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163936} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.904252] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.904479] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.904727] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.904929] env[70020]: INFO nova.compute.manager [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Took 1.16 seconds to destroy the instance on the hypervisor. [ 945.905234] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 945.905486] env[70020]: DEBUG nova.compute.manager [-] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 945.905617] env[70020]: DEBUG nova.network.neutron [-] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.022546] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.022546] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b66ef8ca-a3a4-407b-831c-5a7da07f8f58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.035630] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52720c9a-48ec-8bf0-269b-509c653848bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010252} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.036477] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.036751] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] abc194e3-fb6a-4f2a-8886-e2777530a2a3/abc194e3-fb6a-4f2a-8886-e2777530a2a3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 946.037038] env[70020]: DEBUG oslo_vmware.api [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 946.037038] env[70020]: value = "task-3618571" [ 946.037038] env[70020]: _type = "Task" [ 946.037038] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.037233] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f317ac2-3992-496d-b9b9-c1b81c07bcc7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.048850] env[70020]: DEBUG oslo_vmware.api [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.050613] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 946.050613] env[70020]: value = "task-3618572" [ 946.050613] env[70020]: _type = "Task" [ 946.050613] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.058654] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618572, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.205028] env[70020]: DEBUG nova.scheduler.client.report [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.228085] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 946.228329] env[70020]: DEBUG nova.compute.manager [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.229233] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f12e26-b9d0-4183-a924-022a96b8fea9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.239596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.239933] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.240095] env[70020]: DEBUG nova.network.neutron [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.309097] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618565, 'name': ReconfigVM_Task, 'duration_secs': 0.809052} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.309097] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Reconfigured VM instance instance-0000004c to attach disk [datastore1] ff4e958d-0068-429f-af76-5e7d4dd147f3/ff4e958d-0068-429f-af76-5e7d4dd147f3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.309097] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d24aa002-9d9b-4dd1-921d-91657c7c6354 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.318190] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 946.318190] env[70020]: value = "task-3618573" [ 946.318190] env[70020]: _type = "Task" [ 946.318190] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.327896] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618573, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.384938] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618570, 'name': ReconfigVM_Task, 'duration_secs': 0.336724} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.385976] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 9dec24d6-af8a-41b9-920c-e4420fc69417/9dec24d6-af8a-41b9-920c-e4420fc69417.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.387204] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d895ddb3-8925-4c0c-9e9d-42004d05057a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.398175] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 946.398175] env[70020]: value = "task-3618574" [ 946.398175] env[70020]: _type = "Task" [ 946.398175] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.421094] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618574, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.536169] env[70020]: DEBUG nova.compute.manager [req-1853b984-bb1f-4f7e-a50b-7f7b9f511a7d req-4b334a61-d881-4f14-9679-a824990e12d8 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Received event network-vif-deleted-ac1e36da-5de5-4451-a9e7-39165ab5f152 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 946.536511] env[70020]: INFO nova.compute.manager [req-1853b984-bb1f-4f7e-a50b-7f7b9f511a7d req-4b334a61-d881-4f14-9679-a824990e12d8 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Neutron deleted interface ac1e36da-5de5-4451-a9e7-39165ab5f152; detaching it from the instance and deleting it from the info cache [ 946.536743] env[70020]: DEBUG nova.network.neutron [req-1853b984-bb1f-4f7e-a50b-7f7b9f511a7d req-4b334a61-d881-4f14-9679-a824990e12d8 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.553879] env[70020]: DEBUG oslo_vmware.api [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618571, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.567895] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618572, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.694820] env[70020]: DEBUG nova.network.neutron [-] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.714161] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.716039] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.093s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.716039] env[70020]: DEBUG nova.objects.instance [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lazy-loading 'resources' on Instance uuid ef0d716a-080e-4167-bd34-b2c660b95c88 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.737790] env[70020]: INFO nova.scheduler.client.report [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleted allocations for instance 8bff6907-c2b0-4ad1-9298-b2d622d33fde [ 946.831101] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618573, 'name': Rename_Task, 'duration_secs': 0.252031} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.831101] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.831101] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a9613eb-6b0e-4338-a56c-aa1fb7e2a3da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.838678] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 946.838678] env[70020]: value = "task-3618575" [ 946.838678] env[70020]: _type = "Task" [ 946.838678] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.848626] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618575, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.912026] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618574, 'name': Rename_Task, 'duration_secs': 0.436845} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.912026] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.912026] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b463982-54df-4f7b-8bf0-a75a78c25870 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.917971] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 946.917971] env[70020]: value = "task-3618576" [ 946.917971] env[70020]: _type = "Task" [ 946.917971] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.926966] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618576, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.036013] env[70020]: DEBUG nova.network.neutron [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updating instance_info_cache with network_info: [{"id": "40bf9877-260b-49fc-85fd-307072a733f1", "address": "fa:16:3e:c1:4b:73", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40bf9877-26", "ovs_interfaceid": "40bf9877-260b-49fc-85fd-307072a733f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.041718] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70ba2923-06a0-4b2f-b177-1e6faa31c44b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.061541] env[70020]: DEBUG oslo_vmware.api [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618571, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.066134] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3090ee-d167-4d43-b7be-3f472cd8fa82 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.081795] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697864} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.082507] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] abc194e3-fb6a-4f2a-8886-e2777530a2a3/abc194e3-fb6a-4f2a-8886-e2777530a2a3.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.082729] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.082989] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d788116-2c7a-4d58-9b12-faaea4ebb207 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.091337] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 947.091337] env[70020]: value = "task-3618577" [ 947.091337] env[70020]: _type = "Task" [ 947.091337] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.102354] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618577, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.117077] env[70020]: DEBUG nova.compute.manager [req-1853b984-bb1f-4f7e-a50b-7f7b9f511a7d req-4b334a61-d881-4f14-9679-a824990e12d8 service nova] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Detach interface failed, port_id=ac1e36da-5de5-4451-a9e7-39165ab5f152, reason: Instance b53f55c1-1867-410c-9c53-f552ff30d697 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 947.197798] env[70020]: INFO nova.compute.manager [-] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Took 1.29 seconds to deallocate network for instance. [ 947.246900] env[70020]: DEBUG oslo_concurrency.lockutils [None req-29371888-e02a-43ba-bcf6-3641d5f1ceac tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "8bff6907-c2b0-4ad1-9298-b2d622d33fde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.443s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.355017] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618575, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.440981] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618576, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.540898] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.552343] env[70020]: DEBUG oslo_vmware.api [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618571, 'name': PowerOnVM_Task, 'duration_secs': 1.126044} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.552620] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.552855] env[70020]: DEBUG nova.compute.manager [None req-68dd9cf4-ff1e-4153-9ed6-8ab7572fa6d3 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.553627] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25b96af-44b4-4f62-85fe-099d06a8e5eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.614237] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618577, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114431} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.614983] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.616114] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a4ca13-00fb-4b34-b37e-233389e5fdc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.645715] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] abc194e3-fb6a-4f2a-8886-e2777530a2a3/abc194e3-fb6a-4f2a-8886-e2777530a2a3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.650793] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d43cd56d-41b4-498f-a1b6-c0a8c4ff6011 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.672772] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 947.672772] env[70020]: value = "task-3618578" [ 947.672772] env[70020]: _type = "Task" [ 947.672772] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.684368] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618578, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.704894] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.827984] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145e3bdb-89c1-46f3-8561-98bd4f59d94d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.836543] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30cf289-6f29-4deb-9299-0f4a8dfb92aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.887880] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618575, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.891059] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8c0597-9ca0-49c5-bcbb-360dc5303211 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.901955] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccbfd2d-555d-4e4e-812a-dcda05fb2ce0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.918521] env[70020]: DEBUG nova.compute.provider_tree [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.931380] env[70020]: DEBUG oslo_vmware.api [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618576, 'name': PowerOnVM_Task, 'duration_secs': 0.548103} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.931380] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.931380] env[70020]: INFO nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Took 8.55 seconds to spawn the instance on the hypervisor. 
[ 947.931380] env[70020]: DEBUG nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.931688] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b5d3c8-0db4-46ee-9ada-db1869c9e3bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.957596] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.959163] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1ad79a-8c90-469d-a329-877c9d282e7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.969993] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 947.970386] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-faf7ca36-30bf-458f-baf2-c6922afbb983 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.066902] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.067154] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.067344] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleting the datastore file [datastore2] a8982c31-ea86-4a8d-b8c6-006263ef41f8 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.068227] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ab0c7a3-cfe7-4a4d-aacd-2c909956c152 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.079322] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 948.079322] env[70020]: value = "task-3618580" [ 948.079322] env[70020]: _type = "Task" [ 948.079322] 
env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.090962] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.184561] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618578, 'name': ReconfigVM_Task, 'duration_secs': 0.471553} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.185271] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Reconfigured VM instance instance-0000004f to attach disk [datastore1] abc194e3-fb6a-4f2a-8886-e2777530a2a3/abc194e3-fb6a-4f2a-8886-e2777530a2a3.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.188032] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e303fc2-b21d-4b6c-9f7f-1ce6079b92fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.194283] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 948.194283] env[70020]: value = "task-3618581" [ 948.194283] env[70020]: _type = "Task" [ 948.194283] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.204800] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618581, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.352797] env[70020]: DEBUG oslo_vmware.api [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618575, 'name': PowerOnVM_Task, 'duration_secs': 1.079099} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.353249] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 948.353579] env[70020]: DEBUG nova.compute.manager [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 948.354174] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f081c8-ef06-4a6b-9c58-f2449bf77a81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.425058] env[70020]: DEBUG nova.scheduler.client.report [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.461791] env[70020]: INFO nova.compute.manager [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Took 41.47 seconds to build instance. [ 948.590083] env[70020]: DEBUG oslo_vmware.api [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.320362} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.590683] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.590950] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.591329] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.620082] env[70020]: INFO nova.scheduler.client.report [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocations for instance a8982c31-ea86-4a8d-b8c6-006263ef41f8 [ 948.665959] env[70020]: DEBUG nova.compute.manager [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Received event network-vif-unplugged-40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 948.665959] env[70020]: DEBUG oslo_concurrency.lockutils [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] Acquiring lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.666489] env[70020]: DEBUG oslo_concurrency.lockutils [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.666759] env[70020]: DEBUG oslo_concurrency.lockutils [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.666831] env[70020]: DEBUG nova.compute.manager [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] No waiting events found dispatching network-vif-unplugged-40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.667146] env[70020]: WARNING nova.compute.manager [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: 
a8982c31-ea86-4a8d-b8c6-006263ef41f8] Received unexpected event network-vif-unplugged-40bf9877-260b-49fc-85fd-307072a733f1 for instance with vm_state shelved_offloaded and task_state None. [ 948.667265] env[70020]: DEBUG nova.compute.manager [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Received event network-changed-40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 948.667313] env[70020]: DEBUG nova.compute.manager [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Refreshing instance network info cache due to event network-changed-40bf9877-260b-49fc-85fd-307072a733f1. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 948.667524] env[70020]: DEBUG oslo_concurrency.lockutils [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] Acquiring lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.667686] env[70020]: DEBUG oslo_concurrency.lockutils [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] Acquired lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.667794] env[70020]: DEBUG nova.network.neutron [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Refreshing network info cache for port 40bf9877-260b-49fc-85fd-307072a733f1 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.706685] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618581, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.882226] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.929761] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.214s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.932720] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.180s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.933035] env[70020]: DEBUG nova.objects.instance [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lazy-loading 'resources' on Instance uuid bc57657e-99e8-46b8-9731-ddd4864a3114 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.958168] env[70020]: INFO nova.scheduler.client.report [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Deleted allocations for instance ef0d716a-080e-4167-bd34-b2c660b95c88 [ 948.967900] env[70020]: DEBUG oslo_concurrency.lockutils [None req-918c5dbb-3c98-40e8-b485-8462131620ea tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.102s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.125997] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.210191] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618581, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.231845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "9dec24d6-af8a-41b9-920c-e4420fc69417" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.231845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.231845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "9dec24d6-af8a-41b9-920c-e4420fc69417-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.232043] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.232121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.234611] env[70020]: INFO nova.compute.manager [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Terminating instance [ 949.390950] env[70020]: DEBUG nova.network.neutron [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updated VIF entry in instance network info cache for port 40bf9877-260b-49fc-85fd-307072a733f1. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 949.391340] env[70020]: DEBUG nova.network.neutron [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updating instance_info_cache with network_info: [{"id": "40bf9877-260b-49fc-85fd-307072a733f1", "address": "fa:16:3e:c1:4b:73", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": null, "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap40bf9877-26", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.473554] env[70020]: DEBUG oslo_concurrency.lockutils [None req-521ffcfb-6b37-4755-abf4-a675e776b827 tempest-ListServersNegativeTestJSON-1728183933 tempest-ListServersNegativeTestJSON-1728183933-project-member] Lock "ef0d716a-080e-4167-bd34-b2c660b95c88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.900s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.707985] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618581, 'name': Rename_Task, 'duration_secs': 1.065526} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.708392] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.708704] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b60d888-e508-4de2-ae52-dde539ba5596 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.723399] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 949.723399] env[70020]: value = "task-3618582" [ 949.723399] env[70020]: _type = "Task" [ 949.723399] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.730886] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618582, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.740185] env[70020]: DEBUG nova.compute.manager [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.740666] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.741665] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809b7b9c-a4df-4522-aead-84d35fcae409 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.750394] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.753317] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc410e99-af0f-4f9b-92dc-3008b004a336 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.761471] env[70020]: DEBUG oslo_vmware.api [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 949.761471] env[70020]: value = "task-3618583" [ 949.761471] env[70020]: _type = "Task" [ 949.761471] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.772482] env[70020]: DEBUG oslo_vmware.api [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618583, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.830266] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc208195-18a6-4299-9e48-3196397aaf7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.838573] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70205366-25dc-4b7f-849c-3f03ae4a8dc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.877722] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62037ee-10b5-4f91-9106-0260ae32c447 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.886805] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57c3215-d889-4d8e-b4cb-cff29fcd1315 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.893667] env[70020]: DEBUG oslo_concurrency.lockutils [req-fc7e582a-8032-475c-9b94-54b1f4fa6bb8 req-4ee77fc1-27c4-43be-a5da-aa03dbff29e8 service nova] Releasing lock "refresh_cache-a8982c31-ea86-4a8d-b8c6-006263ef41f8" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.902017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.902601] env[70020]: DEBUG nova.compute.provider_tree [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.232984] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618582, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.271950] env[70020]: DEBUG oslo_vmware.api [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618583, 'name': PowerOffVM_Task, 'duration_secs': 0.209819} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.272305] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.272476] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.272732] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ac06e19-4874-4a49-8700-cf59d0ea21d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.310487] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "ff4e958d-0068-429f-af76-5e7d4dd147f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.310805] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "ff4e958d-0068-429f-af76-5e7d4dd147f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.311026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "ff4e958d-0068-429f-af76-5e7d4dd147f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.311217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "ff4e958d-0068-429f-af76-5e7d4dd147f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.311382] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "ff4e958d-0068-429f-af76-5e7d4dd147f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.313631] env[70020]: INFO nova.compute.manager [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 
tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Terminating instance [ 950.344286] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.344643] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.344909] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore1] 9dec24d6-af8a-41b9-920c-e4420fc69417 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.345217] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2dec3fb3-3df1-478d-a1ea-50c365e59224 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.353299] env[70020]: DEBUG oslo_vmware.api [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 950.353299] env[70020]: value = "task-3618585" [ 950.353299] env[70020]: _type = "Task" [ 950.353299] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.365454] env[70020]: DEBUG oslo_vmware.api [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.407424] env[70020]: DEBUG nova.scheduler.client.report [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.741706] env[70020]: DEBUG oslo_vmware.api [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618582, 'name': PowerOnVM_Task, 'duration_secs': 0.590116} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.742683] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.742683] env[70020]: INFO nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Took 8.60 seconds to spawn the instance on the hypervisor. [ 950.743073] env[70020]: DEBUG nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.744034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cfb3df-7397-4903-92cc-3096607f977d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.817374] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "refresh_cache-ff4e958d-0068-429f-af76-5e7d4dd147f3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.817566] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquired lock "refresh_cache-ff4e958d-0068-429f-af76-5e7d4dd147f3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.817748] env[70020]: DEBUG nova.network.neutron [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.864134] env[70020]: DEBUG oslo_vmware.api [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.450393} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.864401] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.864582] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.864758] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.864924] env[70020]: INFO nova.compute.manager [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Took 1.12 seconds to destroy the instance on the hypervisor. [ 950.865180] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.865373] env[70020]: DEBUG nova.compute.manager [-] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 950.865471] env[70020]: DEBUG nova.network.neutron [-] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.916415] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.984s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.919351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.225s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.921086] env[70020]: INFO nova.compute.claims [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.947303] env[70020]: INFO 
nova.scheduler.client.report [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted allocations for instance bc57657e-99e8-46b8-9731-ddd4864a3114 [ 951.275471] env[70020]: INFO nova.compute.manager [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Took 42.84 seconds to build instance. [ 951.282810] env[70020]: DEBUG nova.compute.manager [req-1b7f2db8-d565-4f0b-8b9a-5d65244e2974 req-e5e5cecf-bff1-4a7e-b45d-ee1a36f0e9b3 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Received event network-vif-deleted-4a49418e-b633-42e9-b84c-6a2ece113e59 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.283122] env[70020]: INFO nova.compute.manager [req-1b7f2db8-d565-4f0b-8b9a-5d65244e2974 req-e5e5cecf-bff1-4a7e-b45d-ee1a36f0e9b3 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Neutron deleted interface 4a49418e-b633-42e9-b84c-6a2ece113e59; detaching it from the instance and deleting it from the info cache [ 951.283304] env[70020]: DEBUG nova.network.neutron [req-1b7f2db8-d565-4f0b-8b9a-5d65244e2974 req-e5e5cecf-bff1-4a7e-b45d-ee1a36f0e9b3 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.335135] env[70020]: DEBUG nova.network.neutron [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 951.456133] env[70020]: DEBUG oslo_concurrency.lockutils [None req-86fe144a-f6e0-4d5b-90e7-9c1a169db90c tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "bc57657e-99e8-46b8-9731-ddd4864a3114" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.571s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.590798] env[70020]: DEBUG nova.network.neutron [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.648326] env[70020]: DEBUG nova.network.neutron [-] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.780614] env[70020]: DEBUG oslo_concurrency.lockutils [None req-32aa862b-af80-4023-a33d-9302cbc5fb88 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.286s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.786032] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c569f182-5f4a-4c79-bd94-bd6cb12b88cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.801970] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebc2dba-cda4-4687-9439-e83338f0cc30 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.849709] env[70020]: DEBUG nova.compute.manager [req-1b7f2db8-d565-4f0b-8b9a-5d65244e2974 req-e5e5cecf-bff1-4a7e-b45d-ee1a36f0e9b3 service nova] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Detach interface failed, port_id=4a49418e-b633-42e9-b84c-6a2ece113e59, reason: Instance 9dec24d6-af8a-41b9-920c-e4420fc69417 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 952.093779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Releasing lock "refresh_cache-ff4e958d-0068-429f-af76-5e7d4dd147f3" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.094239] env[70020]: DEBUG nova.compute.manager [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 952.094435] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.095507] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbcdbcd-9187-4188-9268-9cc086b2cc88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.105478] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.105659] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-380bc168-7715-4367-8c80-18e2d2a59147 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.117887] env[70020]: DEBUG oslo_vmware.api [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 952.117887] env[70020]: value = "task-3618586" [ 952.117887] env[70020]: _type = "Task" [ 952.117887] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.127560] env[70020]: DEBUG oslo_vmware.api [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.155584] env[70020]: INFO nova.compute.manager [-] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Took 1.29 seconds to deallocate network for instance. 
[ 952.378710] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be8c51b-f909-4a3a-ba04-fc0fd9eaddf7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.388740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c55ec5c-3336-44a6-aef0-e244dc79cc79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.419272] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f5a720-a78f-45f0-af03-ee6da141566b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.430747] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46aef76-4ade-4ee1-88b2-3b5580e1106f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.446146] env[70020]: DEBUG nova.compute.provider_tree [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.633539] env[70020]: DEBUG oslo_vmware.api [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618586, 'name': PowerOffVM_Task, 'duration_secs': 0.243781} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.633947] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.634051] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.634277] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9e54178-444c-48ec-876c-d553d125a811 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.665366] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.668018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.668018] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Deleting the datastore file [datastore1] ff4e958d-0068-429f-af76-5e7d4dd147f3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.668018] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b70a0674-01ac-4265-94da-8ad61d618620 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.670831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.671353] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "ea97f6ab-057e-44d3-835a-68b46d241621" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.671673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "ea97f6ab-057e-44d3-835a-68b46d241621" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.671981] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "ea97f6ab-057e-44d3-835a-68b46d241621-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.672263] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "ea97f6ab-057e-44d3-835a-68b46d241621-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.672518] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "ea97f6ab-057e-44d3-835a-68b46d241621-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.675749] env[70020]: DEBUG oslo_vmware.api [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for the task: (returnval){ [ 952.675749] env[70020]: value = "task-3618588" [ 952.675749] env[70020]: _type = "Task" [ 952.675749] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.676696] env[70020]: INFO nova.compute.manager [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Terminating instance [ 952.688648] env[70020]: DEBUG oslo_vmware.api [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618588, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.952509] env[70020]: DEBUG nova.scheduler.client.report [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.183857] env[70020]: DEBUG nova.compute.manager [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 953.183857] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.184925] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc3a488-f127-4cb5-a64b-f27df99aac3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.198132] env[70020]: DEBUG oslo_vmware.api [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Task: {'id': task-3618588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095958} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.201159] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.201289] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.201507] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.201695] env[70020]: INFO nova.compute.manager [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 953.201934] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.202172] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.202634] env[70020]: DEBUG nova.compute.manager [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 953.202737] env[70020]: DEBUG nova.network.neutron [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.204402] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0caf9696-16ec-4117-a7b1-ee2d9a57802c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.215505] env[70020]: DEBUG oslo_vmware.api [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 953.215505] env[70020]: value = "task-3618589" [ 953.215505] env[70020]: _type = "Task" [ 953.215505] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.228347] env[70020]: DEBUG oslo_vmware.api [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618589, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.229168] env[70020]: DEBUG nova.network.neutron [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.456668] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.457314] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 953.461317] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.761s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.461317] env[70020]: DEBUG nova.objects.instance [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lazy-loading 'resources' on Instance uuid 61875dcc-5b76-409b-987f-4ae875909257 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.685170] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.685466] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.685596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3-events" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.685771] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.685932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.688102] env[70020]: INFO nova.compute.manager [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Terminating instance [ 953.726246] env[70020]: DEBUG oslo_vmware.api [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618589, 'name': PowerOffVM_Task, 'duration_secs': 0.216337} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.726434] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.726544] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 953.726783] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cea7aafa-2957-4de8-9270-e8f86a06262f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.731489] env[70020]: DEBUG nova.network.neutron [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.752329] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "edef9245-4048-4ea4-90cc-ebed54498d88" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.752713] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "edef9245-4048-4ea4-90cc-ebed54498d88" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.753296] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "edef9245-4048-4ea4-90cc-ebed54498d88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.753296] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "edef9245-4048-4ea4-90cc-ebed54498d88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.753459] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "edef9245-4048-4ea4-90cc-ebed54498d88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.756022] env[70020]: INFO nova.compute.manager [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Terminating instance [ 953.806905] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 953.807136] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 953.807136] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleting the datastore file [datastore2] ea97f6ab-057e-44d3-835a-68b46d241621 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.807460] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fac1e96f-74ed-45ed-a5bb-c191ffc61524 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.814855] env[70020]: DEBUG 
oslo_vmware.api [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for the task: (returnval){ [ 953.814855] env[70020]: value = "task-3618591" [ 953.814855] env[70020]: _type = "Task" [ 953.814855] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.824339] env[70020]: DEBUG oslo_vmware.api [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618591, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.964125] env[70020]: DEBUG nova.compute.utils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 953.965581] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 953.966446] env[70020]: DEBUG nova.network.neutron [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.029116] env[70020]: DEBUG nova.policy [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b511ec320fdc4dacab9e6f66a50f625c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'affdcbe1612b434697a53a8692ef77a4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 954.054040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.054040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
954.054040] env[70020]: INFO nova.compute.manager [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Shelving [ 954.191547] env[70020]: DEBUG nova.compute.manager [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 954.191547] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.192394] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9386de9f-725e-4d5a-8030-e9abdc610116 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.200990] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.201294] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d03210d-b107-4c17-a764-798329ede46c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.207931] env[70020]: DEBUG oslo_vmware.api [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 954.207931] env[70020]: value = "task-3618592" [ 954.207931] env[70020]: _type = "Task" [ 954.207931] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.218607] env[70020]: DEBUG oslo_vmware.api [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.236159] env[70020]: INFO nova.compute.manager [-] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Took 1.03 seconds to deallocate network for instance. [ 954.261595] env[70020]: DEBUG nova.compute.manager [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 954.261595] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.261595] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6511a7a1-3671-4319-8759-ce2fdea9e6e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.270794] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.271644] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a065dfd9-0db8-406c-8d36-a7b13b2c024d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.281327] env[70020]: DEBUG oslo_vmware.api [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 954.281327] env[70020]: value = "task-3618593" [ 954.281327] env[70020]: _type = "Task" [ 954.281327] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.291797] env[70020]: DEBUG oslo_vmware.api [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618593, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.327051] env[70020]: DEBUG oslo_vmware.api [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Task: {'id': task-3618591, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125409} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.327333] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.327928] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.328841] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.328841] env[70020]: INFO nova.compute.manager [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Took 1.15 seconds to destroy the instance on the hypervisor. [ 954.328841] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.330218] env[70020]: DEBUG nova.network.neutron [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Successfully created port: b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 954.335240] env[70020]: DEBUG nova.compute.manager [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 954.335454] env[70020]: DEBUG nova.network.neutron [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.399352] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d24ba1-69e2-42a2-b5ff-4aaa9ce841ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.410258] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f9096b-4673-4ef9-8fe1-115c74c1cb61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.452868] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4358fe09-9af7-445d-9f19-845d317ca14d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.461437] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14c1a6f-9ce2-4472-970e-0c961ae669b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.477949] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 954.485280] env[70020]: DEBUG nova.compute.provider_tree [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.723021] env[70020]: DEBUG oslo_vmware.api [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618592, 'name': PowerOffVM_Task, 'duration_secs': 0.244841} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.723366] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.723512] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.723836] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8328d905-7f1f-4cfa-9ad3-d354913f2fc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.742050] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.792651] env[70020]: DEBUG oslo_vmware.api [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618593, 'name': PowerOffVM_Task, 'duration_secs': 0.248132} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.792959] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.793144] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.793405] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fb195e9-b284-4127-921e-c9205730313f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.803869] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.804099] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.804286] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Deleting the datastore file [datastore1] abc194e3-fb6a-4f2a-8886-e2777530a2a3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.804641] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2339acf-ad0c-4756-b0ed-ce739b30d4b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.812944] env[70020]: DEBUG oslo_vmware.api [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for the task: (returnval){ [ 954.812944] env[70020]: value = "task-3618596" [ 954.812944] env[70020]: _type = "Task" [ 954.812944] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.822041] env[70020]: DEBUG oslo_vmware.api [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618596, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.864797] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.865124] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.865265] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleting the datastore file [datastore2] edef9245-4048-4ea4-90cc-ebed54498d88 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.865553] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c5ed2e6-aaba-49a6-befe-fa91d0789912 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.872810] env[70020]: DEBUG oslo_vmware.api [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 954.872810] env[70020]: value = "task-3618597" [ 954.872810] env[70020]: _type = "Task" [ 954.872810] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.883491] env[70020]: DEBUG oslo_vmware.api [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618597, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.888602] env[70020]: DEBUG nova.compute.manager [req-46ca4f49-cfd9-4da6-9a76-1b9645714eb1 req-ab137d25-cb6b-496f-becb-e2067f7cdf12 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Received event network-vif-deleted-6348da2f-b0bd-499f-bf5e-b14a38d29438 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.889094] env[70020]: INFO nova.compute.manager [req-46ca4f49-cfd9-4da6-9a76-1b9645714eb1 req-ab137d25-cb6b-496f-becb-e2067f7cdf12 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Neutron deleted interface 6348da2f-b0bd-499f-bf5e-b14a38d29438; detaching it from the instance and deleting it from the info cache [ 954.889094] env[70020]: DEBUG nova.network.neutron [req-46ca4f49-cfd9-4da6-9a76-1b9645714eb1 req-ab137d25-cb6b-496f-becb-e2067f7cdf12 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.995193] env[70020]: DEBUG nova.scheduler.client.report [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.063284] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.063638] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-859c1065-13d0-4e8c-85fb-7dc09ed258e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.072643] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 955.072643] env[70020]: value = "task-3618598" [ 955.072643] env[70020]: _type = "Task" [ 955.072643] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.081438] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618598, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.238392] env[70020]: DEBUG nova.network.neutron [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.323792] env[70020]: DEBUG oslo_vmware.api [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Task: {'id': task-3618596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183506} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.324063] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.324254] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.324429] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.324605] env[70020]: INFO nova.compute.manager [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 955.324843] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 955.325053] env[70020]: DEBUG nova.compute.manager [-] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 955.325155] env[70020]: DEBUG nova.network.neutron [-] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.387750] env[70020]: DEBUG oslo_vmware.api [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153717} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.388037] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.388225] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.388403] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.388573] env[70020]: INFO nova.compute.manager [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Took 1.13 seconds to destroy the instance on the hypervisor. [ 955.388811] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 955.389010] env[70020]: DEBUG nova.compute.manager [-] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 955.389633] env[70020]: DEBUG nova.network.neutron [-] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.391864] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1255141-9c58-475b-bc9c-2be1f776c688 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.402981] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46246900-6a2e-4210-8ca9-efc4e7b5455b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.438180] env[70020]: DEBUG nova.compute.manager [req-46ca4f49-cfd9-4da6-9a76-1b9645714eb1 req-ab137d25-cb6b-496f-becb-e2067f7cdf12 service nova] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Detach interface failed, port_id=6348da2f-b0bd-499f-bf5e-b14a38d29438, reason: Instance ea97f6ab-057e-44d3-835a-68b46d241621 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 955.499196] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 955.502849] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.505479] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.223s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.505602] env[70020]: DEBUG nova.objects.instance [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lazy-loading 'resources' on Instance uuid d65ab5e0-189c-43e1-accf-16248ad02852 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.533730] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=<?>,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-25T22:58:54Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 955.534332] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.534410] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.534656] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 
tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.534864] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.535085] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.535514] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.535514] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.535630] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.535882] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.535969] env[70020]: DEBUG nova.virt.hardware [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.537155] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9ab408-56d5-4fdd-910f-ab0d44c65d68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.540522] env[70020]: INFO nova.scheduler.client.report [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted allocations for instance 61875dcc-5b76-409b-987f-4ae875909257 [ 955.554221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0760d3cd-932c-47f2-a9de-74fda6460c00 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.586540] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618598, 'name': PowerOffVM_Task, 'duration_secs': 0.205929} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.586953] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 955.587702] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a940bd79-e5a3-4553-a47a-2aee9f501072 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.610623] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d1084a-f544-4218-801e-d73a7e2c4700 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.685753] env[70020]: DEBUG nova.compute.manager [req-6d6608f7-8719-4319-8fa5-dd7dfdee4c1c req-a7e233e5-eede-4ab5-a44e-91761c0c9211 service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Received event network-vif-deleted-ad1f3bb4-6fad-4f75-ad98-b53b3676f111 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 955.686062] env[70020]: INFO nova.compute.manager [req-6d6608f7-8719-4319-8fa5-dd7dfdee4c1c req-a7e233e5-eede-4ab5-a44e-91761c0c9211 service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Neutron deleted interface ad1f3bb4-6fad-4f75-ad98-b53b3676f111; detaching it from the instance and deleting it from the info cache [ 955.686611] env[70020]: DEBUG nova.network.neutron [req-6d6608f7-8719-4319-8fa5-dd7dfdee4c1c req-a7e233e5-eede-4ab5-a44e-91761c0c9211 service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.741766] env[70020]: INFO nova.compute.manager [-] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Took 1.41 seconds to deallocate network for instance. 
[ 955.971923] env[70020]: DEBUG nova.network.neutron [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Successfully updated port: b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.057348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e5e3c9a-8f68-4dc3-b34f-bbdf12f20975 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "61875dcc-5b76-409b-987f-4ae875909257" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 34.908s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.078748] env[70020]: DEBUG nova.network.neutron [-] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.124297] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 956.124869] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-328f7806-dbb9-466b-8bb5-fa322858c3cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.135366] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 956.135366] env[70020]: value = "task-3618599" [ 956.135366] env[70020]: _type = "Task" [ 956.135366] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.147932] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618599, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.170132] env[70020]: DEBUG nova.network.neutron [-] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.190438] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97c6816b-6fe6-49cf-9dde-0e11bceeeb4b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.200571] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd54e089-7ad2-4035-9555-18826bb01606 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.241006] env[70020]: DEBUG nova.compute.manager [req-6d6608f7-8719-4319-8fa5-dd7dfdee4c1c req-a7e233e5-eede-4ab5-a44e-91761c0c9211 service nova] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Detach interface failed, port_id=ad1f3bb4-6fad-4f75-ad98-b53b3676f111, reason: Instance edef9245-4048-4ea4-90cc-ebed54498d88 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 956.248955] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.380216] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c90d2f9-a794-4840-a7f3-61cf89c8e306 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.387882] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15404540-b79f-4849-995f-5bc499d9f906 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.420820] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4c2fc8-ceac-43f1-b0f5-f6ee44200b26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.428622] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cd55d2-65f8-4bec-a0d7-9411f0a4ea76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.443803] env[70020]: DEBUG nova.compute.provider_tree [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.474365] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "refresh_cache-2ccd34c8-b433-41be-b800-d06a0595bff9" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.474538] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "refresh_cache-2ccd34c8-b433-41be-b800-d06a0595bff9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.474765] env[70020]: DEBUG nova.network.neutron [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.581915] env[70020]: INFO nova.compute.manager [-] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Took 1.26 seconds to deallocate network for instance. [ 956.662751] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618599, 'name': CreateSnapshot_Task, 'duration_secs': 0.446531} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.663183] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 956.664207] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90c029a-9abe-47df-b044-97f95ed9b7e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.672175] env[70020]: INFO nova.compute.manager [-] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Took 1.28 seconds to deallocate network for instance. 
[ 956.917760] env[70020]: DEBUG nova.compute.manager [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Received event network-vif-deleted-85556b4a-7248-4583-8362-b471b8b0abbe {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.918061] env[70020]: DEBUG nova.compute.manager [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Received event network-vif-plugged-b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.918352] env[70020]: DEBUG oslo_concurrency.lockutils [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] Acquiring lock "2ccd34c8-b433-41be-b800-d06a0595bff9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.918573] env[70020]: DEBUG oslo_concurrency.lockutils [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.918745] env[70020]: DEBUG oslo_concurrency.lockutils [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.918957] env[70020]: DEBUG nova.compute.manager [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] No waiting events found dispatching network-vif-plugged-b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.919069] env[70020]: WARNING nova.compute.manager [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Received unexpected event network-vif-plugged-b33639f5-3e61-4132-80fc-92b074ea22a1 for instance with vm_state building and task_state spawning. [ 956.919235] env[70020]: DEBUG nova.compute.manager [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Received event network-changed-b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.919380] env[70020]: DEBUG nova.compute.manager [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Refreshing instance network info cache due to event network-changed-b33639f5-3e61-4132-80fc-92b074ea22a1. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 956.919542] env[70020]: DEBUG oslo_concurrency.lockutils [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] Acquiring lock "refresh_cache-2ccd34c8-b433-41be-b800-d06a0595bff9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.947098] env[70020]: DEBUG nova.scheduler.client.report [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.008270] env[70020]: DEBUG nova.network.neutron [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.090905] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.144541] env[70020]: DEBUG nova.network.neutron [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Updating instance_info_cache with network_info: [{"id": "b33639f5-3e61-4132-80fc-92b074ea22a1", "address": "fa:16:3e:99:0b:02", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb33639f5-3e", "ovs_interfaceid": "b33639f5-3e61-4132-80fc-92b074ea22a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 957.197337] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 957.198413] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.198981] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9137bd43-76c0-4b4d-9849-c49f40789e24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.208766] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 957.208766] env[70020]: value = "task-3618600" [ 957.208766] env[70020]: _type = "Task" [ 957.208766] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.218103] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618600, 'name': CloneVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.453023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.948s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.455750] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.627s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.457221] env[70020]: INFO nova.compute.claims [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.479805] env[70020]: INFO nova.scheduler.client.report [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Deleted allocations for instance d65ab5e0-189c-43e1-accf-16248ad02852 [ 957.647217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "refresh_cache-2ccd34c8-b433-41be-b800-d06a0595bff9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.647618] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Instance network_info: |[{"id": "b33639f5-3e61-4132-80fc-92b074ea22a1", "address": "fa:16:3e:99:0b:02", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb33639f5-3e", "ovs_interfaceid": "b33639f5-3e61-4132-80fc-92b074ea22a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.648129] env[70020]: DEBUG 
oslo_concurrency.lockutils [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] Acquired lock "refresh_cache-2ccd34c8-b433-41be-b800-d06a0595bff9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.648359] env[70020]: DEBUG nova.network.neutron [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Refreshing network info cache for port b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.649589] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:0b:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b33639f5-3e61-4132-80fc-92b074ea22a1', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.657366] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.660321] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.660783] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-784b6501-4d2e-45c3-a303-92ce584de853 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.682837] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.682837] env[70020]: value = "task-3618601" [ 957.682837] env[70020]: _type = "Task" [ 957.682837] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.691517] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618601, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.718632] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618600, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.898370] env[70020]: DEBUG nova.network.neutron [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Updated VIF entry in instance network info cache for port b33639f5-3e61-4132-80fc-92b074ea22a1. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.899038] env[70020]: DEBUG nova.network.neutron [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Updating instance_info_cache with network_info: [{"id": "b33639f5-3e61-4132-80fc-92b074ea22a1", "address": "fa:16:3e:99:0b:02", "network": {"id": "2c56b682-8e9a-4fb4-9724-a34a00bb4455", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1229093518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "affdcbe1612b434697a53a8692ef77a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb33639f5-3e", "ovs_interfaceid": "b33639f5-3e61-4132-80fc-92b074ea22a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.987485] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57a59576-14ab-49c9-9c2b-c87f4b756353 tempest-MultipleCreateTestJSON-634477930 tempest-MultipleCreateTestJSON-634477930-project-member] Lock "d65ab5e0-189c-43e1-accf-16248ad02852" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.749s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.196848] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618601, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.219761] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618600, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.401949] env[70020]: DEBUG oslo_concurrency.lockutils [req-48f14c0f-4d76-4188-a3e1-3a4d141e0f68 req-f325cf6d-5775-483e-921f-70fd0f8e91ce service nova] Releasing lock "refresh_cache-2ccd34c8-b433-41be-b800-d06a0595bff9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.693419] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618601, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.719636] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618600, 'name': CloneVM_Task, 'duration_secs': 1.500668} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.722091] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Created linked-clone VM from snapshot [ 958.723120] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1148d0c2-aafc-4714-80ef-7a884b61e107 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.734325] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Uploading image ce5b528a-e2a0-4108-a61b-8585c8e0dc08 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 958.762938] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 958.762938] env[70020]: value = "vm-721743" [ 958.762938] env[70020]: _type = "VirtualMachine" [ 958.762938] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 958.763405] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d2a24f51-9534-41fd-8937-627bb302cfd6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.774085] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lease: (returnval){ [ 958.774085] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523838a1-d739-741a-c077-8cf94d5b93b1" [ 958.774085] env[70020]: _type = "HttpNfcLease" [ 958.774085] env[70020]: } obtained for exporting VM: (result){ [ 958.774085] env[70020]: value = "vm-721743" [ 958.774085] env[70020]: _type = "VirtualMachine" [ 958.774085] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 958.774085] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the lease: (returnval){ [ 958.774085] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523838a1-d739-741a-c077-8cf94d5b93b1" [ 958.774085] env[70020]: _type = "HttpNfcLease" [ 958.774085] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 958.780516] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 958.780516] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523838a1-d739-741a-c077-8cf94d5b93b1" [ 958.780516] env[70020]: _type = "HttpNfcLease" [ 958.780516] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 958.876353] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d87cca-0f3d-4259-931d-d97dd224e0ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.884256] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec48f2d-7bea-43d2-883b-c3fb6f2b5f99 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.914436] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c55467-b945-45b6-bb95-392a7c1a632a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.921858] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069e7aa5-f2f1-413d-89f3-c17444706e44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.934981] env[70020]: DEBUG nova.compute.provider_tree [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.195575] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618601, 'name': CreateVM_Task, 'duration_secs': 1.42207} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.195948] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 959.196593] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.196593] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.196880] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 959.197151] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-492a7679-c6eb-4bfe-8a85-a893019cddc7 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.201506] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 959.201506] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529f8947-6882-0725-29e3-c74623e4f104" [ 959.201506] env[70020]: _type = "Task" [ 959.201506] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.209407] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529f8947-6882-0725-29e3-c74623e4f104, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.283131] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 959.283131] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523838a1-d739-741a-c077-8cf94d5b93b1" [ 959.283131] env[70020]: _type = "HttpNfcLease" [ 959.283131] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 959.283492] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 959.283492] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523838a1-d739-741a-c077-8cf94d5b93b1" [ 959.283492] env[70020]: _type = "HttpNfcLease" [ 959.283492] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 959.284445] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8051355-c2a3-46fd-b016-ca34dafa7446 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.292168] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52416d62-6248-f696-dc01-719085fd6094/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 959.292350] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52416d62-6248-f696-dc01-719085fd6094/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 959.388562] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f2b1372e-de68-4e7e-a548-ad933670a606 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.438681] env[70020]: DEBUG nova.scheduler.client.report [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.713400] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529f8947-6882-0725-29e3-c74623e4f104, 'name': SearchDatastore_Task, 'duration_secs': 0.009786} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.713751] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.714029] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.714283] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.714435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.714665] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 
tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.714949] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b174ffd2-d779-471c-a1b2-dd1419dcf825 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.724275] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.724548] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.725316] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2096fc63-f217-4b0b-9bec-2db31741c289 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.737887] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 959.737887] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520fe6a1-61e7-56b1-ad2f-9a8b10847321" [ 959.737887] env[70020]: _type = "Task" [ 959.737887] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.750138] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520fe6a1-61e7-56b1-ad2f-9a8b10847321, 'name': SearchDatastore_Task, 'duration_secs': 0.00955} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.751175] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-136555ef-f886-4dea-a9ec-010473d7e01a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.757598] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 959.757598] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522b4e2e-f0a6-bf22-dc34-7e38b80bee6b" [ 959.757598] env[70020]: _type = "Task" [ 959.757598] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.766840] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522b4e2e-f0a6-bf22-dc34-7e38b80bee6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.944125] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.944685] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.949019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.853s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.949206] env[70020]: INFO nova.compute.claims [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.268529] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522b4e2e-f0a6-bf22-dc34-7e38b80bee6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009231} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.268819] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.269065] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 2ccd34c8-b433-41be-b800-d06a0595bff9/2ccd34c8-b433-41be-b800-d06a0595bff9.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.269441] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ca7f130-b4db-4e4d-a3ed-e3465359413e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.276607] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 960.276607] env[70020]: value = "task-3618603" [ 960.276607] env[70020]: _type = "Task" [ 960.276607] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.285620] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.460365] env[70020]: DEBUG nova.compute.utils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 960.462197] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 960.462397] env[70020]: DEBUG nova.network.neutron [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 960.521011] env[70020]: DEBUG nova.policy [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33279b0a8dc848ceb443776f840845c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16f59a8f930846ec9299416b9ec5dd48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.787507] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478276} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.787902] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 2ccd34c8-b433-41be-b800-d06a0595bff9/2ccd34c8-b433-41be-b800-d06a0595bff9.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.788145] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.788421] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30958a6a-489e-4912-8c01-24768ebae5f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.796344] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 960.796344] env[70020]: value = "task-3618604" [ 960.796344] env[70020]: _type = "Task" [ 960.796344] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.808018] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618604, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.895868] env[70020]: DEBUG nova.network.neutron [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Successfully created port: 998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.968350] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 961.306510] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071286} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.307187] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.307671] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5610346-6beb-40e3-809c-97c9a827028f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.334866] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 2ccd34c8-b433-41be-b800-d06a0595bff9/2ccd34c8-b433-41be-b800-d06a0595bff9.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.337991] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ee4689-d1f6-4c1c-a5c6-0617ceeb21b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.358693] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 961.358693] env[70020]: value = "task-3618605" [ 961.358693] env[70020]: _type = "Task" [ 961.358693] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.368941] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618605, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.442282] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3506ccab-3877-4afd-85b8-b7bde186bbb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.450876] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c185e45b-7ab4-4cab-a137-b5d05e833512 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.488554] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900a09da-553d-4a66-b162-15a1caf17585 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.497441] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331b57b5-0edb-429f-9524-7a859e2eadb4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.513934] env[70020]: DEBUG nova.compute.provider_tree [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.869413] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618605, 'name': ReconfigVM_Task, 'duration_secs': 0.275126} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.869959] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 2ccd34c8-b433-41be-b800-d06a0595bff9/2ccd34c8-b433-41be-b800-d06a0595bff9.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.870903] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-468fb41c-8331-4691-82f1-68f7f7c97008 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.877998] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 961.877998] env[70020]: value = "task-3618606" [ 961.877998] env[70020]: _type = "Task" [ 961.877998] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.886261] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618606, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.993981] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 962.020650] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 962.021556] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.021556] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 962.021556] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.021556] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 962.021556] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
962.021868] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 962.021868] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 962.022046] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 962.022200] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 962.022366] env[70020]: DEBUG nova.virt.hardware [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 962.023227] env[70020]: DEBUG nova.scheduler.client.report [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.027372] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8feab20-67f5-4c48-903d-f6cd184639b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.037115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32c001f-69a1-48b9-961f-4bb1a41f6ac9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.335764] env[70020]: DEBUG nova.compute.manager [req-54e60f60-3018-41bb-a3ca-f1a46184d2c9 req-59bb5e07-4822-4d83-b58c-dfad144c2f13 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Received event network-vif-plugged-998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.336350] env[70020]: DEBUG oslo_concurrency.lockutils [req-54e60f60-3018-41bb-a3ca-f1a46184d2c9 
req-59bb5e07-4822-4d83-b58c-dfad144c2f13 service nova] Acquiring lock "d45966fe-98ff-4466-8e7e-90550034742f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.336575] env[70020]: DEBUG oslo_concurrency.lockutils [req-54e60f60-3018-41bb-a3ca-f1a46184d2c9 req-59bb5e07-4822-4d83-b58c-dfad144c2f13 service nova] Lock "d45966fe-98ff-4466-8e7e-90550034742f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.336742] env[70020]: DEBUG oslo_concurrency.lockutils [req-54e60f60-3018-41bb-a3ca-f1a46184d2c9 req-59bb5e07-4822-4d83-b58c-dfad144c2f13 service nova] Lock "d45966fe-98ff-4466-8e7e-90550034742f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.336953] env[70020]: DEBUG nova.compute.manager [req-54e60f60-3018-41bb-a3ca-f1a46184d2c9 req-59bb5e07-4822-4d83-b58c-dfad144c2f13 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] No waiting events found dispatching network-vif-plugged-998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.337168] env[70020]: WARNING nova.compute.manager [req-54e60f60-3018-41bb-a3ca-f1a46184d2c9 req-59bb5e07-4822-4d83-b58c-dfad144c2f13 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Received unexpected event network-vif-plugged-998637c4-6d93-4002-8b56-ee2560a41b7c for instance with vm_state building and task_state spawning. [ 962.390466] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618606, 'name': Rename_Task, 'duration_secs': 0.161259} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.390911] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.391192] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82ae1939-b4be-4ea8-916d-f01cc1b2b4c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.399781] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 962.399781] env[70020]: value = "task-3618607" [ 962.399781] env[70020]: _type = "Task" [ 962.399781] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.408997] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.428709] env[70020]: DEBUG nova.network.neutron [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Successfully updated port: 998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.531462] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.532028] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 962.535999] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.343s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.536869] env[70020]: INFO nova.compute.claims [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.913218] env[70020]: DEBUG oslo_vmware.api [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618607, 'name': PowerOnVM_Task, 'duration_secs': 0.457367} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.913513] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.913726] env[70020]: INFO nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Took 7.41 seconds to spawn the instance on the hypervisor. 
[ 962.914033] env[70020]: DEBUG nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.914894] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136f5810-bb38-414e-80fb-6ce268cdd182 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.932012] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.932217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.932416] env[70020]: DEBUG nova.network.neutron [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.043889] env[70020]: DEBUG nova.compute.utils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.045508] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 963.045679] env[70020]: DEBUG nova.network.neutron [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 963.093550] env[70020]: DEBUG nova.policy [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7316cb970904dd8b5b300ec05fb4166', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7412ed0b196c4d44b03bc93b0aae2954', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 963.372532] env[70020]: DEBUG nova.network.neutron [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Successfully created port: 9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.440595] env[70020]: INFO nova.compute.manager [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Took 42.76 seconds to build instance. [ 963.477197] env[70020]: DEBUG nova.network.neutron [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.553023] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 963.665061] env[70020]: DEBUG nova.network.neutron [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updating instance_info_cache with network_info: [{"id": "998637c4-6d93-4002-8b56-ee2560a41b7c", "address": "fa:16:3e:37:69:bf", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998637c4-6d", "ovs_interfaceid": "998637c4-6d93-4002-8b56-ee2560a41b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.944583] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5794bdfe-c1c0-4369-90b6-55a9721ce667 tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 81.932s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.979741] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504cffab-af80-414a-a2d4-a69804b9edda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.988584] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a120abb-5a6d-492e-a370-edbf1ae24fc3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.028663] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67e6326-fc0f-48a6-8d12-71b68629dac0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.038346] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c6d8a2-9efb-49ba-86f3-27f6b092d67a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.052660] env[70020]: DEBUG nova.compute.provider_tree [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020)
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.167860] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.168366] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Instance network_info: |[{"id": "998637c4-6d93-4002-8b56-ee2560a41b7c", "address": "fa:16:3e:37:69:bf", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998637c4-6d", "ovs_interfaceid": "998637c4-6d93-4002-8b56-ee2560a41b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 964.168863] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:69:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '998637c4-6d93-4002-8b56-ee2560a41b7c', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.179228] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.179961] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.180391] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0342388-3d26-40c9-af0b-ff64dd5188a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.209306] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.209306] env[70020]: value = "task-3618608" [ 964.209306] env[70020]: _type = "Task" [ 964.209306] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.219836] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618608, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.556191] env[70020]: DEBUG nova.scheduler.client.report [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.564341] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 964.594098] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 964.594481] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.594663] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 964.594878] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.595055] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 964.595199] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 964.595415] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 964.595586] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 964.595737] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 
tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 964.595896] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 964.596133] env[70020]: DEBUG nova.virt.hardware [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 964.597339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2faeb4-86f7-491f-bc44-0c7469705b57 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.605646] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ae53c6-4309-48b2-b086-7830ee04ab3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.678409] env[70020]: DEBUG nova.compute.manager [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Received event network-changed-998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 964.678681] env[70020]: DEBUG nova.compute.manager [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Refreshing instance network info cache due to event network-changed-998637c4-6d93-4002-8b56-ee2560a41b7c. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 964.679037] env[70020]: DEBUG oslo_concurrency.lockutils [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] Acquiring lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.680438] env[70020]: DEBUG oslo_concurrency.lockutils [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] Acquired lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.680438] env[70020]: DEBUG nova.network.neutron [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Refreshing network info cache for port 998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.709700] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "2ccd34c8-b433-41be-b800-d06a0595bff9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.709977] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.710207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "2ccd34c8-b433-41be-b800-d06a0595bff9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.710392] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.710563] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.716137] env[70020]: INFO nova.compute.manager [None
req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Terminating instance [ 964.724369] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618608, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.898855] env[70020]: DEBUG nova.network.neutron [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Successfully updated port: 9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.061976] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.062599] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 965.065635] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.379s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.067078] env[70020]: INFO nova.compute.claims [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.221161] env[70020]: DEBUG nova.compute.manager [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.221616] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.227637] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021e03ce-1176-4ee4-a43e-847eb1632646 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.230754] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618608, 'name': CreateVM_Task, 'duration_secs': 0.599778} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.230754] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.231889] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.231889] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.232044] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.232289] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6123efc-4f6a-4d92-a520-2fbaa38e83d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.236189] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.236787] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc308648-ff64-4531-90a1-517ae2a76b97 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.239508] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: 
(returnval){ [ 965.239508] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52123dfc-d475-732b-0de5-605033bbd800" [ 965.239508] env[70020]: _type = "Task" [ 965.239508] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.244953] env[70020]: DEBUG oslo_vmware.api [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 965.244953] env[70020]: value = "task-3618609" [ 965.244953] env[70020]: _type = "Task" [ 965.244953] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.247992] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52123dfc-d475-732b-0de5-605033bbd800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.257910] env[70020]: DEBUG oslo_vmware.api [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618609, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.389738] env[70020]: DEBUG nova.network.neutron [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updated VIF entry in instance network info cache for port 998637c4-6d93-4002-8b56-ee2560a41b7c. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 965.390145] env[70020]: DEBUG nova.network.neutron [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updating instance_info_cache with network_info: [{"id": "998637c4-6d93-4002-8b56-ee2560a41b7c", "address": "fa:16:3e:37:69:bf", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998637c4-6d", "ovs_interfaceid": "998637c4-6d93-4002-8b56-ee2560a41b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.401342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.401652] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.401909] env[70020]: DEBUG nova.network.neutron [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.571950] env[70020]: DEBUG nova.compute.utils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.573960] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 965.574205] env[70020]: DEBUG nova.network.neutron [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.613734] env[70020]: DEBUG nova.policy [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f145e14948f241829c262c46c5321c28', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c8373e835ad4420890442390872c6fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 965.756549] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52123dfc-d475-732b-0de5-605033bbd800, 'name': SearchDatastore_Task, 'duration_secs': 0.016797} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.756916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.757192] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.757458] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.757615] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.758048] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 
tempest-AttachVolumeNegativeTest-680083968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.758575] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1be94fb6-626a-4ad6-8005-6c7163ce0874 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.766328] env[70020]: DEBUG oslo_vmware.api [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618609, 'name': PowerOffVM_Task, 'duration_secs': 0.304937} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.766328] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.766328] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.766328] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afcce2e5-118c-4fdc-8ca7-5579d7594198 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.778151] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.778151] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.778151] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-198b55c0-ed4d-469d-82f8-a8508aba64c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.781885] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 965.781885] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524a5c83-380a-b7c2-ec1f-a2ee6343e281" [ 965.781885] env[70020]: _type = "Task" [ 965.781885] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.789203] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524a5c83-380a-b7c2-ec1f-a2ee6343e281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.892785] env[70020]: DEBUG oslo_concurrency.lockutils [req-cb2c6a57-3df0-4e2d-b84e-fe7689725b7a req-def0082b-0a23-43b4-927a-7422e7a30224 service nova] Releasing lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.963262] env[70020]: DEBUG nova.network.neutron [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.002172] env[70020]: DEBUG nova.network.neutron [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Successfully created port: 46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.086396] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 966.215482] env[70020]: DEBUG nova.network.neutron [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating instance_info_cache with network_info: [{"id": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "address": "fa:16:3e:da:06:2a", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c6dd76e-18", "ovs_interfaceid": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.291987] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524a5c83-380a-b7c2-ec1f-a2ee6343e281, 'name': SearchDatastore_Task, 'duration_secs': 0.014555} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.296304] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28b8f7cc-d6a4-488f-ab73-1c3e0e48f775 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.298956] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.299149] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.299320] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleting the datastore file [datastore2] 2ccd34c8-b433-41be-b800-d06a0595bff9 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.299971] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-720c3995-db37-4860-a6c8-60a4edfd8075 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.305561] env[70020]: DEBUG oslo_vmware.api [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for the task: (returnval){ [ 966.305561] env[70020]: value = "task-3618611" [ 966.305561] env[70020]: _type = "Task" [ 966.305561] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.306823] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 966.306823] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a7ba03-4b07-8fea-7f65-20e59f70d923" [ 966.306823] env[70020]: _type = "Task" [ 966.306823] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.319627] env[70020]: DEBUG oslo_vmware.api [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618611, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.323138] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a7ba03-4b07-8fea-7f65-20e59f70d923, 'name': SearchDatastore_Task, 'duration_secs': 0.01144} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.323305] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.323558] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d45966fe-98ff-4466-8e7e-90550034742f/d45966fe-98ff-4466-8e7e-90550034742f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.323849] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfeef9cd-4aea-4550-aa67-1eda0a2e40de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.330319] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 966.330319] env[70020]: value = "task-3618612" [ 966.330319] env[70020]: _type = "Task" [ 966.330319] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.340779] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618612, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.521234] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38fef77-d30c-46af-b69d-559f820af487 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.530708] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e150878a-5ab9-49e3-92a3-57f42f7f317c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.565039] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6184474-c7cc-432b-9183-45e0a62de8ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.573275] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586491db-cf9c-4ccd-a057-7ca9b7c86ed4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.588333] env[70020]: DEBUG nova.compute.provider_tree [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.721512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.722343] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Instance network_info: |[{"id": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "address": "fa:16:3e:da:06:2a", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c6dd76e-18", "ovs_interfaceid": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 966.722343] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None 
req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:06:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '503991c4-44d0-42d9-aa03-5259331f1051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c6dd76e-1819-4f40-b5b1-e548b0c947ec', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.730548] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.730794] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 966.731033] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47a3300c-7093-412c-932d-3777e1b4b2f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.753602] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 966.753602] env[70020]: value = "task-3618613" [ 966.753602] env[70020]: _type = "Task" [ 966.753602] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.762690] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618613, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.816327] env[70020]: DEBUG oslo_vmware.api [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Task: {'id': task-3618611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159333} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.816617] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.816827] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.817047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.817492] env[70020]: INFO nova.compute.manager [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Took 1.60 seconds to destroy the instance on the hypervisor. [ 966.817492] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.818291] env[70020]: DEBUG nova.compute.manager [-] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.818291] env[70020]: DEBUG nova.network.neutron [-] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.828614] env[70020]: DEBUG nova.compute.manager [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Received event network-vif-plugged-9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 966.828614] env[70020]: DEBUG oslo_concurrency.lockutils [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.828614] env[70020]: DEBUG oslo_concurrency.lockutils [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.828614] env[70020]: DEBUG oslo_concurrency.lockutils [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.828614] env[70020]: DEBUG nova.compute.manager [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] No waiting events found dispatching network-vif-plugged-9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 966.829354] env[70020]: WARNING nova.compute.manager [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Received unexpected event network-vif-plugged-9c6dd76e-1819-4f40-b5b1-e548b0c947ec for instance with vm_state building and task_state spawning. [ 966.829354] env[70020]: DEBUG nova.compute.manager [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Received event network-changed-9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 966.829354] env[70020]: DEBUG nova.compute.manager [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Refreshing instance network info cache due to event network-changed-9c6dd76e-1819-4f40-b5b1-e548b0c947ec. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 966.829354] env[70020]: DEBUG oslo_concurrency.lockutils [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] Acquiring lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.829549] env[70020]: DEBUG oslo_concurrency.lockutils [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] Acquired lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.829549] env[70020]: DEBUG nova.network.neutron [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Refreshing network info cache for port 9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.842224] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618612, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464854} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.843046] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d45966fe-98ff-4466-8e7e-90550034742f/d45966fe-98ff-4466-8e7e-90550034742f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 966.843301] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 966.843610] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dbc7a9e5-7336-4a22-8d48-6232d0d9ce53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.853616] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 966.853616] env[70020]: value = "task-3618614" [ 966.853616] env[70020]: _type = "Task" [ 966.853616] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.862596] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618614, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.091608] env[70020]: DEBUG nova.scheduler.client.report [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.104672] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 967.131643] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 967.131792] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.132344] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.132344] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.132344] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 967.132564] env[70020]: DEBUG 
nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 967.132768] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 967.132951] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 967.133112] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 967.134112] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 967.134112] env[70020]: DEBUG nova.virt.hardware [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 967.134663] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7201a647-464e-461f-a6a7-00997adbd2d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.142607] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60271e0f-27dd-4e89-88c6-d644e91283ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.188026] env[70020]: DEBUG nova.compute.manager [req-e938f188-df00-4a3e-954f-843b88bd7ddc req-2ef46391-ab24-4a1f-93f8-3f1681b25b1f service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Received event network-vif-deleted-b33639f5-3e61-4132-80fc-92b074ea22a1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.188165] env[70020]: INFO nova.compute.manager [req-e938f188-df00-4a3e-954f-843b88bd7ddc req-2ef46391-ab24-4a1f-93f8-3f1681b25b1f service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Neutron deleted interface b33639f5-3e61-4132-80fc-92b074ea22a1; detaching it from the instance and deleting it from the info cache [ 967.188490] env[70020]: DEBUG nova.network.neutron [req-e938f188-df00-4a3e-954f-843b88bd7ddc req-2ef46391-ab24-4a1f-93f8-3f1681b25b1f service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Updating instance_info_cache with 
network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.264274] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618613, 'name': CreateVM_Task, 'duration_secs': 0.372377} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.264458] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 967.265940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.265940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.265940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 967.266211] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fbaede9-cadc-4724-bea7-d03aebeacd56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.271343] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 967.271343] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b726b9-a80f-4368-5bc0-e9711db03e1d" [ 967.271343] env[70020]: _type = "Task" [ 967.271343] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.279743] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b726b9-a80f-4368-5bc0-e9711db03e1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.363550] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07558} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.363916] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.364726] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df27b5b5-41c2-4f70-9325-068740530767 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.389755] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] d45966fe-98ff-4466-8e7e-90550034742f/d45966fe-98ff-4466-8e7e-90550034742f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.390087] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-100de0c9-4fd1-45b9-87cd-de526e5d48fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.409346] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 967.409346] env[70020]: value = "task-3618615" [ 967.409346] env[70020]: _type = "Task" [ 967.409346] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.417282] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618615, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.500135] env[70020]: DEBUG nova.network.neutron [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Successfully updated port: 46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 967.597369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.597876] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 967.600387] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.647s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.600608] env[70020]: DEBUG nova.objects.instance [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lazy-loading 'resources' on Instance uuid c9ce57f3-f9a2-40aa-b7eb-403840c34304 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.653617] env[70020]: DEBUG nova.network.neutron [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updated VIF entry in instance network info cache for port 9c6dd76e-1819-4f40-b5b1-e548b0c947ec. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.654044] env[70020]: DEBUG nova.network.neutron [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating instance_info_cache with network_info: [{"id": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "address": "fa:16:3e:da:06:2a", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c6dd76e-18", "ovs_interfaceid": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.673025] env[70020]: DEBUG nova.network.neutron [-] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.690934] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3ab8ee7-646d-4956-9131-b2116ae886d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.701401] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75af314c-114f-4ab7-9dff-e73912f2fa5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
967.734613] env[70020]: DEBUG nova.compute.manager [req-e938f188-df00-4a3e-954f-843b88bd7ddc req-2ef46391-ab24-4a1f-93f8-3f1681b25b1f service nova] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Detach interface failed, port_id=b33639f5-3e61-4132-80fc-92b074ea22a1, reason: Instance 2ccd34c8-b433-41be-b800-d06a0595bff9 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 967.782132] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b726b9-a80f-4368-5bc0-e9711db03e1d, 'name': SearchDatastore_Task, 'duration_secs': 0.011293} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.782486] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.782731] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 967.782987] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.783156] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.783355] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 967.783630] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e82f2d08-5d5a-44c4-aa82-140e163ff958 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.791927] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 967.792107] env[70020]: DEBUG 
nova.virt.vmwareapi.vmops [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 967.792846] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a5f87e6-0b1f-4044-9ebc-c9ec764b55f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.797736] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 967.797736] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52164b0d-0e11-a146-cb61-3f3b27826aa0" [ 967.797736] env[70020]: _type = "Task" [ 967.797736] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.805660] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52164b0d-0e11-a146-cb61-3f3b27826aa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.921818] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618615, 'name': ReconfigVM_Task, 'duration_secs': 0.446616} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.922113] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Reconfigured VM instance instance-00000051 to attach disk [datastore1] d45966fe-98ff-4466-8e7e-90550034742f/d45966fe-98ff-4466-8e7e-90550034742f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.922749] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22808e5f-5f08-447f-bf20-272f15a8989f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.929063] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 967.929063] env[70020]: value = "task-3618616" [ 967.929063] env[70020]: _type = "Task" [ 967.929063] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.936706] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618616, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.000433] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.000594] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.000751] env[70020]: DEBUG nova.network.neutron [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.104066] env[70020]: DEBUG nova.compute.utils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 968.108807] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 968.108807] env[70020]: DEBUG nova.network.neutron [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 968.157496] env[70020]: DEBUG oslo_concurrency.lockutils [req-b67e4bff-5a2e-4327-be19-6bdee52ed998 req-106b7b31-269a-45eb-aed2-1dab9d5c5c54 service nova] Releasing lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.176820] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52416d62-6248-f696-dc01-719085fd6094/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 968.177412] env[70020]: INFO nova.compute.manager [-] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Took 1.36 seconds to deallocate network for instance. 
[ 968.178588] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29269f7d-1ac1-4b12-8e24-38af59055468 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.183520] env[70020]: DEBUG nova.policy [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba70b922029647a39d6fad8b4a3c5690', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e4a0dd86eab468fb76764252de97ffd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 968.199762] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52416d62-6248-f696-dc01-719085fd6094/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 968.199929] env[70020]: ERROR oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52416d62-6248-f696-dc01-719085fd6094/disk-0.vmdk due to incomplete transfer. [ 968.200169] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7395ec9e-e459-4bd1-b66a-dba439af18ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.207622] env[70020]: DEBUG oslo_vmware.rw_handles [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52416d62-6248-f696-dc01-719085fd6094/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 968.207622] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Uploaded image ce5b528a-e2a0-4108-a61b-8585c8e0dc08 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 968.209726] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 968.212258] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3d07fdd3-b485-45a0-bc7d-56396c43db83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.219253] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 968.219253] env[70020]: value = "task-3618617" [ 968.219253] env[70020]: _type = "Task" [ 968.219253] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.228852] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618617, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.308700] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52164b0d-0e11-a146-cb61-3f3b27826aa0, 'name': SearchDatastore_Task, 'duration_secs': 0.012958} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.311872] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc9e56f4-fa62-4628-9b38-9d489dee8376 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.317260] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 968.317260] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5230cfea-bbc3-7bca-d6ee-319268b0f39f" [ 968.317260] env[70020]: _type = "Task" [ 968.317260] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.325854] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5230cfea-bbc3-7bca-d6ee-319268b0f39f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.440970] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618616, 'name': Rename_Task, 'duration_secs': 0.218645} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.441327] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 968.441603] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5af95db7-8012-4db7-826e-2ff7103ab0d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.451028] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 968.451028] env[70020]: value = "task-3618618" [ 968.451028] env[70020]: _type = "Task" [ 968.451028] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.464708] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.498607] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98fa433-f593-4295-9720-d2526d9bbd82 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.507989] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c01a7e-0d14-443d-bd07-d12f4960fb42 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.539481] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec71bfc-1295-4ddb-9a8c-86cf24171a94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.542776] env[70020]: DEBUG nova.network.neutron [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 968.549205] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c950dd-c795-40e7-aa2a-9b9abc7be16b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.564031] env[70020]: DEBUG nova.compute.provider_tree [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.612209] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 968.698523] env[70020]: DEBUG nova.network.neutron [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Updating instance_info_cache with network_info: [{"id": "46420036-2adf-470d-b041-a6487903eed6", "address": "fa:16:3e:b9:a2:b2", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46420036-2a", "ovs_interfaceid": "46420036-2adf-470d-b041-a6487903eed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.700795] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.726627] env[70020]: DEBUG nova.network.neutron [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Successfully created port: 7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.739019] env[70020]: DEBUG oslo_vmware.api [None 
req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618617, 'name': Destroy_Task, 'duration_secs': 0.406762} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.739329] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Destroyed the VM [ 968.739581] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 968.739832] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a8426ce0-5527-4386-9845-6a01bb7c0005 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.746940] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 968.746940] env[70020]: value = "task-3618619" [ 968.746940] env[70020]: _type = "Task" [ 968.746940] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.755090] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618619, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.828171] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5230cfea-bbc3-7bca-d6ee-319268b0f39f, 'name': SearchDatastore_Task, 'duration_secs': 0.009774} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.828441] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.828708] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] da07cb36-244f-4f48-a5b6-8d00324c1edf/da07cb36-244f-4f48-a5b6-8d00324c1edf.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 968.828980] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-697bde0c-6673-434a-842b-16c20e21ee35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.835685] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 968.835685] env[70020]: value = "task-3618620" [ 968.835685] env[70020]: _type = "Task" [ 968.835685] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.846362] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618620, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.857534] env[70020]: DEBUG nova.compute.manager [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Received event network-vif-plugged-46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.857701] env[70020]: DEBUG oslo_concurrency.lockutils [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] Acquiring lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.857909] env[70020]: DEBUG oslo_concurrency.lockutils [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.858106] env[70020]: DEBUG oslo_concurrency.lockutils [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.858279] env[70020]: DEBUG nova.compute.manager [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] No waiting events found dispatching network-vif-plugged-46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 968.858448] env[70020]: WARNING nova.compute.manager [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Received unexpected event network-vif-plugged-46420036-2adf-470d-b041-a6487903eed6 for instance with vm_state building and task_state spawning. [ 968.858640] env[70020]: DEBUG nova.compute.manager [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Received event network-changed-46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.858792] env[70020]: DEBUG nova.compute.manager [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Refreshing instance network info cache due to event network-changed-46420036-2adf-470d-b041-a6487903eed6. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 968.858957] env[70020]: DEBUG oslo_concurrency.lockutils [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] Acquiring lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.960500] env[70020]: DEBUG oslo_vmware.api [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618618, 'name': PowerOnVM_Task, 'duration_secs': 0.458863} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.960761] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.960958] env[70020]: INFO nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Took 6.97 seconds to spawn the instance on the hypervisor. [ 968.961168] env[70020]: DEBUG nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.963271] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb5e4b9-c598-479f-bd26-9894f9b1c9fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.069023] env[70020]: DEBUG nova.scheduler.client.report [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.201249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.201631] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Instance network_info: |[{"id": "46420036-2adf-470d-b041-a6487903eed6", "address": 
"fa:16:3e:b9:a2:b2", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46420036-2a", "ovs_interfaceid": "46420036-2adf-470d-b041-a6487903eed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 969.202063] env[70020]: DEBUG oslo_concurrency.lockutils [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] Acquired lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.202264] env[70020]: DEBUG nova.network.neutron [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Refreshing network info cache for port 46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.204336] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:a2:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46420036-2adf-470d-b041-a6487903eed6', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 969.216244] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Creating folder: Project (6c8373e835ad4420890442390872c6fe). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 969.219827] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-209b69f9-0092-47cb-8ed8-a120afee9ce3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.232847] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Created folder: Project (6c8373e835ad4420890442390872c6fe) in parent group-v721521. 
[ 969.233073] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Creating folder: Instances. Parent ref: group-v721747. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 969.233329] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3fda1f4-4189-4133-8d0c-bfd789262e4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.243903] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Created folder: Instances in parent group-v721747. [ 969.244205] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.244424] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 969.244642] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b89a123-2556-4c4b-a2f5-6c1b3e6c8d27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.280686] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618619, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.281929] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 969.281929] env[70020]: value = "task-3618623" [ 969.281929] env[70020]: _type = "Task" [ 969.281929] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.297388] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618623, 'name': CreateVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.346285] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461074} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.350987] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] da07cb36-244f-4f48-a5b6-8d00324c1edf/da07cb36-244f-4f48-a5b6-8d00324c1edf.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.350987] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.350987] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-937244c6-ef8f-44fb-bbb1-f8f3358fccf2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.356655] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 969.356655] env[70020]: value = "task-3618624" [ 969.356655] env[70020]: _type = "Task" [ 969.356655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.364331] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.434119] env[70020]: DEBUG nova.network.neutron [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Updated VIF entry in instance network info cache for port 46420036-2adf-470d-b041-a6487903eed6. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.434648] env[70020]: DEBUG nova.network.neutron [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Updating instance_info_cache with network_info: [{"id": "46420036-2adf-470d-b041-a6487903eed6", "address": "fa:16:3e:b9:a2:b2", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46420036-2a", "ovs_interfaceid": "46420036-2adf-470d-b041-a6487903eed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.479867] env[70020]: INFO nova.compute.manager [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Took 41.67 seconds to build instance. 
[ 969.572851] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.575920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.377s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.576450] env[70020]: DEBUG nova.objects.instance [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lazy-loading 'resources' on Instance uuid 8317f386-44d0-4b1b-8590-d0336fafac21 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.600053] env[70020]: INFO nova.scheduler.client.report [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Deleted allocations for instance c9ce57f3-f9a2-40aa-b7eb-403840c34304 [ 969.625944] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 969.654314] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.654638] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.654719] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.654921] 
env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.655082] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.655228] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.655434] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.655585] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.655748] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.655909] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.656095] env[70020]: DEBUG nova.virt.hardware [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.657220] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07af4692-1b83-43a5-b8b2-db671ceb47dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.666355] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d562e2fa-7905-49a8-8b6e-117c499123bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.778239] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618619, 'name': RemoveSnapshot_Task, 
'duration_secs': 0.92294} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.778556] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 969.778835] env[70020]: DEBUG nova.compute.manager [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 969.779666] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7479e4f7-7197-463e-9283-f98d96f27bd6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.794233] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618623, 'name': CreateVM_Task, 'duration_secs': 0.468602} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.794409] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 969.795139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.795307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.795622] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 969.795855] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45b0f507-9d31-4fb8-9a13-3d9fc6b25dfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.799955] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 969.799955] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525c12b5-6161-492e-8c47-615d08b19052" [ 969.799955] env[70020]: _type = "Task" [ 969.799955] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.808257] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525c12b5-6161-492e-8c47-615d08b19052, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.864720] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.228849} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.865013] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.865855] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bfa319-a889-4a77-9e30-1552461da32b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.889802] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] da07cb36-244f-4f48-a5b6-8d00324c1edf/da07cb36-244f-4f48-a5b6-8d00324c1edf.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.890537] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a759233-b571-4f73-ac3c-c3d71474026e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.914506] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 969.914506] env[70020]: value = "task-3618625" [ 969.914506] env[70020]: _type = "Task" [ 969.914506] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.926069] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618625, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.937902] env[70020]: DEBUG oslo_concurrency.lockutils [req-9785176d-6f6e-43af-ba6f-fcf8b2ab5d1c req-d29da4fa-eb24-4b7d-8836-de4859274226 service nova] Releasing lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.981648] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8574ffd1-be59-4ef9-b08e-a3ef415d87c5 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "d45966fe-98ff-4466-8e7e-90550034742f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.596s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.108085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc70e12b-61ca-4960-834d-ce5ce953214a tempest-SecurityGroupsTestJSON-917701892 tempest-SecurityGroupsTestJSON-917701892-project-member] Lock "c9ce57f3-f9a2-40aa-b7eb-403840c34304" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.836s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.293733] env[70020]: INFO nova.compute.manager [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Shelve offloading [ 970.311297] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525c12b5-6161-492e-8c47-615d08b19052, 'name': SearchDatastore_Task, 'duration_secs': 0.013136} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.312295] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.312426] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.312648] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.313330] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.313547] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.316545] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f82f4c39-f30d-4578-842a-9168ec33c05a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.329456] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.329456] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 970.329456] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331bed6f-3e82-45db-a780-5c1270e5b1fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.336654] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 970.336654] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ed2cfc-4190-9048-e160-1e95d26b9cfa" [ 970.336654] env[70020]: _type = "Task" [ 970.336654] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.349988] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ed2cfc-4190-9048-e160-1e95d26b9cfa, 'name': SearchDatastore_Task, 'duration_secs': 0.008731} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.353927] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dda8956-517f-4fee-be4a-c9a4f0132e6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.359590] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 970.359590] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d935a3-7639-4f6d-6b0e-23b9f213ea7e" [ 970.359590] env[70020]: _type = "Task" [ 970.359590] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.369817] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d935a3-7639-4f6d-6b0e-23b9f213ea7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.429901] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618625, 'name': ReconfigVM_Task, 'duration_secs': 0.334738} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.430178] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfigured VM instance instance-00000052 to attach disk [datastore1] da07cb36-244f-4f48-a5b6-8d00324c1edf/da07cb36-244f-4f48-a5b6-8d00324c1edf.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.430817] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63333492-e8fb-4f3f-ba2a-cc6a3f433551 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.438095] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 970.438095] env[70020]: value = "task-3618626" [ 970.438095] env[70020]: _type = "Task" [ 970.438095] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.449601] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618626, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.499657] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea5cd39-641a-48bc-9d30-37c54e0ea078 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.508711] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2d54c8-e825-416f-aee2-67728f7288c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.552735] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b655ed4-f566-44e3-8ee2-603a01506586 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.567533] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde8116b-0e58-4de4-9b23-605d55737fc5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.584510] env[70020]: DEBUG nova.compute.provider_tree [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.800139] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.800536] env[70020]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ec787ca-5758-484d-8bd9-925e012aa46b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.807784] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 970.807784] env[70020]: value = "task-3618627" [ 970.807784] env[70020]: _type = "Task" [ 970.807784] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.816072] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618627, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.869071] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d935a3-7639-4f6d-6b0e-23b9f213ea7e, 'name': SearchDatastore_Task, 'duration_secs': 0.020177} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.869334] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.869690] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/a39731d2-0b9b-41fa-b9ac-f80193a26d20.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.869942] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86d48018-02ff-41e6-b734-d9e79d334f39 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.875724] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 970.875724] env[70020]: value = "task-3618628" [ 970.875724] env[70020]: _type = "Task" [ 970.875724] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.882776] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618628, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.950019] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618626, 'name': Rename_Task, 'duration_secs': 0.137804} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.950347] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.950599] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2a8dad9-4f98-4e3d-9141-d6ce233a7267 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.956669] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 970.956669] env[70020]: value = "task-3618629" [ 970.956669] env[70020]: _type = "Task" [ 970.956669] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.969134] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618629, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.090898] env[70020]: DEBUG nova.scheduler.client.report [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.242075] env[70020]: DEBUG nova.network.neutron [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Successfully updated port: 7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.319294] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 971.319533] env[70020]: DEBUG nova.compute.manager [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.324024] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c857e436-15df-417a-b15e-e345e5727e4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.327446] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.327662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.327843] env[70020]: DEBUG nova.network.neutron [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.369738] env[70020]: DEBUG nova.compute.manager [req-848f1416-dbd7-4905-96fa-f230ebc47e3a req-e0b865a6-fd9a-476b-9323-89a7e1eaed6a service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Received event 
network-vif-plugged-7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 971.370040] env[70020]: DEBUG oslo_concurrency.lockutils [req-848f1416-dbd7-4905-96fa-f230ebc47e3a req-e0b865a6-fd9a-476b-9323-89a7e1eaed6a service nova] Acquiring lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.370371] env[70020]: DEBUG oslo_concurrency.lockutils [req-848f1416-dbd7-4905-96fa-f230ebc47e3a req-e0b865a6-fd9a-476b-9323-89a7e1eaed6a service nova] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.370652] env[70020]: DEBUG oslo_concurrency.lockutils [req-848f1416-dbd7-4905-96fa-f230ebc47e3a req-e0b865a6-fd9a-476b-9323-89a7e1eaed6a service nova] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.370869] env[70020]: DEBUG nova.compute.manager [req-848f1416-dbd7-4905-96fa-f230ebc47e3a req-e0b865a6-fd9a-476b-9323-89a7e1eaed6a service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] No waiting events found dispatching network-vif-plugged-7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 971.370939] env[70020]: WARNING nova.compute.manager [req-848f1416-dbd7-4905-96fa-f230ebc47e3a req-e0b865a6-fd9a-476b-9323-89a7e1eaed6a service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Received unexpected event network-vif-plugged-7e0131ef-1dbf-4927-bc49-8bcea6a75d84 for instance with vm_state building and task_state spawning. [ 971.388194] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490261} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.388459] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/a39731d2-0b9b-41fa-b9ac-f80193a26d20.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.388673] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.388929] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee357df3-b882-470a-a312-3bc99a2554ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.396132] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 971.396132] env[70020]: value = "task-3618630" [ 971.396132] env[70020]: _type = "Task" [ 971.396132] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.404278] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618630, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.466341] env[70020]: DEBUG oslo_vmware.api [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618629, 'name': PowerOnVM_Task, 'duration_secs': 0.501263} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.466646] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.466894] env[70020]: INFO nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Took 6.90 seconds to spawn the instance on the hypervisor. 
[ 971.469586] env[70020]: DEBUG nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.469586] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d8973b-89e2-404f-a0b5-885f966014c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.596641] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.021s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.599654] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.036s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.600450] env[70020]: INFO nova.compute.claims [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.625518] env[70020]: INFO nova.scheduler.client.report [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted allocations for instance 8317f386-44d0-4b1b-8590-d0336fafac21 [ 971.745217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.745217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquired lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.745415] env[70020]: DEBUG nova.network.neutron [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.768568] env[70020]: DEBUG nova.compute.manager [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Received event network-changed-7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 971.768687] env[70020]: DEBUG nova.compute.manager [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Refreshing instance network info cache due to event network-changed-7e0131ef-1dbf-4927-bc49-8bcea6a75d84. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 971.768937] env[70020]: DEBUG oslo_concurrency.lockutils [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] Acquiring lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.911893] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618630, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069837} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.918016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.918016] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb8dba6-64d0-477e-83fc-d529c9066e11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.941681] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/a39731d2-0b9b-41fa-b9ac-f80193a26d20.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.942131] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da925805-9923-4498-a615-3d5d878a90a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.963440] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 971.963440] env[70020]: value = "task-3618631" [ 971.963440] env[70020]: _type = "Task" [ 971.963440] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.971747] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618631, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.993071] env[70020]: INFO nova.compute.manager [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Took 42.92 seconds to build instance. [ 972.133307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e69847cc-877f-4186-afc9-55debae9dd34 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "8317f386-44d0-4b1b-8590-d0336fafac21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.576s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.157724] env[70020]: DEBUG nova.network.neutron [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.301680] env[70020]: DEBUG nova.network.neutron [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 972.477199] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618631, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.495062] env[70020]: DEBUG oslo_concurrency.lockutils [None req-222c660a-f0d4-47ec-96a2-a48b7df4ca47 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.288s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.649644] env[70020]: DEBUG nova.network.neutron [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Updating instance_info_cache with network_info: [{"id": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "address": "fa:16:3e:c0:c4:f9", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0131ef-1d", "ovs_interfaceid": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.659889] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.819348] env[70020]: INFO nova.compute.manager [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Rebuilding instance [ 972.876583] env[70020]: DEBUG nova.compute.manager [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.877485] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e444837d-78ff-443c-9dd4-ff929d753071 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.976905] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 
tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618631, 'name': ReconfigVM_Task, 'duration_secs': 0.992484} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.979833] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Reconfigured VM instance instance-00000054 to attach disk [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/a39731d2-0b9b-41fa-b9ac-f80193a26d20.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.980613] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83638e78-adb6-4713-8c74-2bc3571191b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.989981] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 972.989981] env[70020]: value = "task-3618632" [ 972.989981] env[70020]: _type = "Task" [ 972.989981] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.008044] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618632, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.048099] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b7c044-dd6d-4f00-ad1d-91d4d3073992 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.055404] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df2fe0c-5ec7-429e-8c4e-081680aeed13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.088274] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8993c10d-69ab-4fc4-b6af-7a5f4a7d8969 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.095738] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e856b9c5-07d1-43f7-830d-5102a0b961bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.110565] env[70020]: DEBUG nova.compute.provider_tree [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.157605] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 
tempest-ServerExternalEventsTest-408165301-project-member] Releasing lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.157953] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Instance network_info: |[{"id": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "address": "fa:16:3e:c0:c4:f9", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0131ef-1d", "ovs_interfaceid": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.158356] env[70020]: DEBUG oslo_concurrency.lockutils [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] Acquired lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.158493] env[70020]: DEBUG nova.network.neutron [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Refreshing network info cache for port 7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.159642] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:c4:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e0131ef-1dbf-4927-bc49-8bcea6a75d84', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.168869] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Creating folder: Project (5e4a0dd86eab468fb76764252de97ffd). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.170325] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.170540] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.170731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.170925] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.171124] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.172587] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9909c273-6c76-4ecc-be25-30382015fb08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.177209] env[70020]: INFO nova.compute.manager [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Terminating instance [ 973.191021] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Created folder: Project (5e4a0dd86eab468fb76764252de97ffd) in parent group-v721521. [ 973.191021] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Creating folder: Instances. Parent ref: group-v721750. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.191021] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07d2c3ec-1000-465b-b1e0-11d4afb4657b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.198057] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Created folder: Instances in parent group-v721750. [ 973.198291] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.198701] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.199260] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-754fbb71-9e36-438a-ae02-cecf9bbd7635 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.218663] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.218663] env[70020]: value = "task-3618635" [ 973.218663] env[70020]: _type = "Task" [ 973.218663] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.226091] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618635, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.361263] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.362215] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bae01c-65cc-4529-b188-40de3058a1f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.370721] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.371060] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-546e8e45-916e-493c-8363-1b4dde69630c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.440667] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.441070] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.441546] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleting the datastore file [datastore1] 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.442655] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdd442af-3e62-4939-8788-36877779889b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.449541] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 973.449541] env[70020]: value = "task-3618637" [ 973.449541] env[70020]: _type = "Task" [ 973.449541] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.458917] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618637, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.504069] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618632, 'name': Rename_Task, 'duration_secs': 0.336036} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.504398] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 973.504782] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-608bacbe-aae1-4484-8754-d082114e36c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.512132] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 973.512132] env[70020]: value = "task-3618638" [ 973.512132] env[70020]: _type = "Task" [ 973.512132] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.520917] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.614829] env[70020]: DEBUG nova.scheduler.client.report [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 973.681456] env[70020]: DEBUG nova.compute.manager [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.681707] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.682636] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2953130-ed8d-403d-8340-90b850aac191 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.692069] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.693069] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bed783e2-cd72-453a-b4cb-8abe9bb42a46 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.699964] env[70020]: DEBUG oslo_vmware.api [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 973.699964] env[70020]: value = "task-3618639" [ 973.699964] env[70020]: _type = "Task" [ 973.699964] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.709946] env[70020]: DEBUG oslo_vmware.api [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.727246] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618635, 'name': CreateVM_Task, 'duration_secs': 0.451866} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.727478] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 973.728235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.728596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.728806] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 973.729096] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3271260d-3319-4c1a-ba85-d10041f23eaf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.734495] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 973.734495] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d570bc-874c-a1f2-4b1b-1d628ccc5a24" [ 973.734495] env[70020]: _type = "Task" [ 973.734495] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.743718] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d570bc-874c-a1f2-4b1b-1d628ccc5a24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.895695] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.896055] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00a146c5-01af-445a-9c2b-a2155cae12de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.905807] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 973.905807] env[70020]: value = "task-3618640" [ 973.905807] env[70020]: _type = "Task" [ 973.905807] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.917785] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.961240] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Received event network-changed-998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.961843] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Refreshing instance network info cache due to event network-changed-998637c4-6d93-4002-8b56-ee2560a41b7c. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 973.964567] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquiring lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.964567] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquired lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.964567] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Refreshing network info cache for port 998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.973377] env[70020]: DEBUG oslo_vmware.api [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143394} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.973377] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.973377] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.973377] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.001292] env[70020]: INFO nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted allocations for instance 2198e7f8-5458-4b97-abb3-0a3c932cebc2 [ 974.024203] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618638, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.093596] env[70020]: DEBUG nova.network.neutron [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Updated VIF entry in instance network info cache for port 7e0131ef-1dbf-4927-bc49-8bcea6a75d84. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.094121] env[70020]: DEBUG nova.network.neutron [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Updating instance_info_cache with network_info: [{"id": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "address": "fa:16:3e:c0:c4:f9", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0131ef-1d", "ovs_interfaceid": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.123020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.123020] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 974.124707] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.930s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.127128] env[70020]: INFO nova.compute.claims [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.158168] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "42d20396-883d-4141-a226-61f476057cbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.158411] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "42d20396-883d-4141-a226-61f476057cbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.211339] env[70020]: DEBUG oslo_vmware.api [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618639, 'name': PowerOffVM_Task, 'duration_secs': 0.222391} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.211604] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.211770] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.212031] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dea9c5e4-4f31-4d8d-b07c-5125091cd106 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.245134] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d570bc-874c-a1f2-4b1b-1d628ccc5a24, 'name': SearchDatastore_Task, 'duration_secs': 0.010817} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.245447] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.245682] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.245946] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.246121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.246532] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.246874] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b236ff3-cb65-40cb-befb-c6d71a5148ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.256022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.256223] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.256917] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-787741b7-0b7e-4769-b8ec-a1eb59f08529 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.263566] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 974.263566] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529a1700-d618-ba00-5102-9da184fd0349" [ 974.263566] env[70020]: _type = "Task" [ 974.263566] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.276459] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529a1700-d618-ba00-5102-9da184fd0349, 'name': SearchDatastore_Task, 'duration_secs': 0.009737} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.277617] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1def870d-86cf-42fc-910a-726cdb6f746c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.285721] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.285942] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.286108] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleting the datastore file [datastore2] f56e88f6-3a25-44d9-bdb1-cc4291169c9c {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.286352] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1644c79d-82a0-4b61-9242-4e740a3b0563 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.289945] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 974.289945] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52327182-22a7-0d49-b944-17da2b2fc1df" [ 974.289945] env[70020]: _type = "Task" [ 974.289945] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.294326] env[70020]: DEBUG oslo_vmware.api [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 974.294326] env[70020]: value = "task-3618642" [ 974.294326] env[70020]: _type = "Task" [ 974.294326] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.301144] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52327182-22a7-0d49-b944-17da2b2fc1df, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.301704] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.301965] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 563512c2-b80f-4f14-add5-d48e2b7a0ee9/563512c2-b80f-4f14-add5-d48e2b7a0ee9.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.302223] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d973b39b-0240-4482-9681-4c9e6eed1fd2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.306692] env[70020]: DEBUG oslo_vmware.api [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.312456] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 974.312456] env[70020]: value = "task-3618643" [ 974.312456] env[70020]: _type = "Task" [ 974.312456] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.320137] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618643, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.414893] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618640, 'name': PowerOffVM_Task, 'duration_secs': 0.181319} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.415522] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.415712] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 974.417308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4d53e1-3e09-4693-8726-ec87a66ecb97 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.424972] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.425142] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-963c9fb2-c035-4764-9124-7d89bd8198ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.493026] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.493026] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.493026] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleting the datastore file [datastore1] 3a4f2342-58e7-436b-a779-0fa093b52409 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.493026] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-735ed337-7fdb-4289-8d58-68a5d9e4df74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.500940] env[70020]: DEBUG oslo_vmware.api [None 
req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 974.500940] env[70020]: value = "task-3618645" [ 974.500940] env[70020]: _type = "Task" [ 974.500940] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.509104] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.515583] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.524966] env[70020]: DEBUG oslo_vmware.api [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618638, 'name': PowerOnVM_Task, 'duration_secs': 0.604726} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.525355] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.525565] env[70020]: INFO nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Took 7.42 seconds to spawn the instance on the hypervisor. 
[ 974.525738] env[70020]: DEBUG nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.526545] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d25bdb7-070c-4c80-b4c5-8c0fdf8d78b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.597724] env[70020]: DEBUG oslo_concurrency.lockutils [req-c015bcf2-1c34-40b1-93f5-c094590b1089 req-71191594-3152-408b-bc10-e9a35b9dd665 service nova] Releasing lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.634026] env[70020]: DEBUG nova.compute.utils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 974.638486] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 974.638869] env[70020]: DEBUG nova.network.neutron [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 974.663151] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 974.704287] env[70020]: DEBUG nova.policy [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e2413fa85cf43728ab5e444d5a8cccf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfcc2b6300e54620aee884920b416cd6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 974.812952] env[70020]: DEBUG oslo_vmware.api [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145431} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.813452] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.813653] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.813855] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.814058] env[70020]: INFO nova.compute.manager [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 974.814299] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.819323] env[70020]: DEBUG nova.compute.manager [-] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.819466] env[70020]: DEBUG nova.network.neutron [-] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.828195] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618643, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.884679] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updated VIF entry in instance network info cache for port 998637c4-6d93-4002-8b56-ee2560a41b7c.
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.885153] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updating instance_info_cache with network_info: [{"id": "998637c4-6d93-4002-8b56-ee2560a41b7c", "address": "fa:16:3e:37:69:bf", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998637c4-6d", "ovs_interfaceid": "998637c4-6d93-4002-8b56-ee2560a41b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.014950] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359666} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.015288] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.015436] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.015610] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.053326] env[70020]: INFO nova.compute.manager [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Took 44.88 seconds to build instance. 
[ 975.139964] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 975.202333] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.329673] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531215} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.330565] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 563512c2-b80f-4f14-add5-d48e2b7a0ee9/563512c2-b80f-4f14-add5-d48e2b7a0ee9.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 975.330565] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.330565] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45ccbb83-35c9-416c-b97a-492fff9b3b18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.339091] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 975.339091] env[70020]: value = "task-3618646" [ 975.339091] env[70020]: _type = "Task" [ 975.339091] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.350016] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618646, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.389159] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Releasing lock "refresh_cache-d45966fe-98ff-4466-8e7e-90550034742f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.389159] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-vif-unplugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.389159] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.389503] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.389503] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.389805] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] No waiting events found dispatching network-vif-unplugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 975.389805] env[70020]: WARNING nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received unexpected event network-vif-unplugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 for instance with vm_state shelved and task_state shelving_offloading. [ 975.390072] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Received event network-changed-9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.390233] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Refreshing instance network info cache due to event network-changed-9c6dd76e-1819-4f40-b5b1-e548b0c947ec.
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 975.390318] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquiring lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.390585] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquired lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.390585] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Refreshing network info cache for port 9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.455965] env[70020]: DEBUG nova.network.neutron [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Successfully created port: 71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.557226] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2fa7283-44b0-45f0-b985-0c7266eabb30 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 71.460s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.608036] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dfb920-7de3-4db8-aa6d-93b1a8ccd7da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.616562] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c02f8fb-02f6-4759-ae55-7a302d844a75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.651852] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1e3b9f-2022-4135-98bd-8c9beeb41752 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.660541] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17db06a8-dabe-485b-8380-0bf545eeac96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.677068] env[70020]: DEBUG nova.compute.provider_tree [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.797076] env[70020]: DEBUG nova.compute.manager
[req-710508ec-788f-4d94-b8fb-dde093257aff req-a763afb9-6994-42a7-988a-f2d1665dda7e service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Received event network-vif-deleted-9071978f-4173-4873-86de-85c11de7ddb7 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.798088] env[70020]: INFO nova.compute.manager [req-710508ec-788f-4d94-b8fb-dde093257aff req-a763afb9-6994-42a7-988a-f2d1665dda7e service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Neutron deleted interface 9071978f-4173-4873-86de-85c11de7ddb7; detaching it from the instance and deleting it from the info cache [ 975.798088] env[70020]: DEBUG nova.network.neutron [req-710508ec-788f-4d94-b8fb-dde093257aff req-a763afb9-6994-42a7-988a-f2d1665dda7e service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.849174] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071554} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.849174] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.849744] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00282a2a-41d5-43c1-a62a-362ad1d5e629 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.890271] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 563512c2-b80f-4f14-add5-d48e2b7a0ee9/563512c2-b80f-4f14-add5-d48e2b7a0ee9.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.890271] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e125866-df4b-4f24-943d-f2774dc38174 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.915480] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 975.915480] env[70020]: value = "task-3618647" [ 975.915480] env[70020]: _type = "Task" [ 975.915480] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.924420] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618647, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.945941] env[70020]: DEBUG nova.network.neutron [-] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.071735] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=<?>,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-25T22:58:54Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 976.072067] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.072287] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 976.072492] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.072639] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 976.072805] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 976.073057] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 976.073225] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1
tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 976.073387] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 976.073547] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 976.073715] env[70020]: DEBUG nova.virt.hardware [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 976.074588] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4778f84-e1f4-4d19-a7f2-178529f6bda7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.085611] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca67bdc9-debe-4aca-9541-9753bf31cba5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.101702] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:89:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1869b01-6eea-468c-ac71-153c8eeda8ca', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 976.109421] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.110830] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 976.110830] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb3bddc3-2a5a-4b05-bbec-f4d5621e0587 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.131603] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 976.131603] env[70020]: value = "task-3618648" [ 976.131603] env[70020]: _type = "Task" [ 976.131603] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.140432] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618648, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.168245] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 976.181541] env[70020]: DEBUG nova.scheduler.client.report [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.196983] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=<?>,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-25T22:58:54Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 976.197228] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.197333] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 976.197535] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Flavor pref 0:0:0 {{(pid=70020)
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.197683] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 976.197826] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 976.198101] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 976.199075] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 976.199075] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 976.199075] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 976.199075] env[70020]: DEBUG nova.virt.hardware [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 976.199698] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b652e2e3-9eba-4549-afe8-3df6b80daea6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.204164] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updated VIF entry in instance network info cache for port 9c6dd76e-1819-4f40-b5b1-e548b0c947ec. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.204672] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating instance_info_cache with network_info: [{"id": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "address": "fa:16:3e:da:06:2a", "network": {"id": "1aa98d4f-b271-4d4d-b8d3-156403999ea9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1993994671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7412ed0b196c4d44b03bc93b0aae2954", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "503991c4-44d0-42d9-aa03-5259331f1051", "external-id": "nsx-vlan-transportzone-3", "segmentation_id": 3, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c6dd76e-18", "ovs_interfaceid": "9c6dd76e-1819-4f40-b5b1-e548b0c947ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.212354] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c00c6c-7fe5-48aa-8db3-ea766262f51d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.300833] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a44b455d-6398-4899-b27d-aa0703808e63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.311850] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d0656e-54f7-42c9-b2b2-84dcbaa14e9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.324486] env[70020]: INFO nova.compute.manager [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Rescuing [ 976.324863] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.325080] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.325293] env[70020]: DEBUG nova.network.neutron [None 
req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.348439] env[70020]: DEBUG nova.compute.manager [req-710508ec-788f-4d94-b8fb-dde093257aff req-a763afb9-6994-42a7-988a-f2d1665dda7e service nova] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Detach interface failed, port_id=9071978f-4173-4873-86de-85c11de7ddb7, reason: Instance f56e88f6-3a25-44d9-bdb1-cc4291169c9c could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 976.359778] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.426622] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618647, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.449807] env[70020]: INFO nova.compute.manager [-] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Took 1.63 seconds to deallocate network for instance. [ 976.642129] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618648, 'name': CreateVM_Task, 'duration_secs': 0.38587} completed successfully.
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.642299] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 976.642974] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.643191] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.643469] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 976.643734] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-891d331c-f706-43d4-824b-f1cc7952b0a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.650416] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 976.650416] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52418f05-857f-b203-bbd8-0c3a5bf5bcc3" [ 976.650416] env[70020]: _type = "Task" [ 976.650416] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.659388] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52418f05-857f-b203-bbd8-0c3a5bf5bcc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.690532] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.691129] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 976.694158] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.499s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.694416] env[70020]: DEBUG nova.objects.instance [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lazy-loading 'resources' on Instance uuid 4335f92a-897a-4779-be70-4f0754a66d53 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.707893] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Releasing lock "refresh_cache-da07cb36-244f-4f48-a5b6-8d00324c1edf" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.708162] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-changed-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.708340] env[70020]: DEBUG nova.compute.manager [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Refreshing instance network info cache due to event network-changed-52cf3b73-bbee-4e96-91f2-a1caa2041501. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 976.708635] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.708785] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.708966] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Refreshing network info cache for port 52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.927803] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618647, 'name': ReconfigVM_Task, 'duration_secs': 0.55893} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.928197] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 563512c2-b80f-4f14-add5-d48e2b7a0ee9/563512c2-b80f-4f14-add5-d48e2b7a0ee9.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.928878] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4567125-ce0b-4b14-a8fd-7fb9bad40358 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.939893] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 976.939893] env[70020]: value = "task-3618649" [ 976.939893] env[70020]: _type = "Task" [ 976.939893] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.955143] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618649, 'name': Rename_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.962040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.117254] env[70020]: DEBUG nova.network.neutron [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Updating instance_info_cache with network_info: [{"id": "46420036-2adf-470d-b041-a6487903eed6", "address": "fa:16:3e:b9:a2:b2", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46420036-2a", "ovs_interfaceid": "46420036-2adf-470d-b041-a6487903eed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.155941] env[70020]: DEBUG nova.network.neutron [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Successfully updated port: 71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.163210] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52418f05-857f-b203-bbd8-0c3a5bf5bcc3, 'name': SearchDatastore_Task, 'duration_secs': 0.013779} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.163210] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.163210] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.163210] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.163210] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.163210] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.163210] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c2b3d0b-cd6b-4ef9-abff-531a700e8d17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.172438] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.172749] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.173420] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d871bcb8-5153-49e9-8665-476801dfe54c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.180018] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 977.180018] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f3a51f-1805-d2f5-d3d8-3c2e119887f9" [ 977.180018] env[70020]: _type = "Task" [ 977.180018] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.188110] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f3a51f-1805-d2f5-d3d8-3c2e119887f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.198070] env[70020]: DEBUG nova.compute.utils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 977.204564] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 977.204917] env[70020]: DEBUG nova.network.neutron [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 977.282071] env[70020]: DEBUG nova.policy [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291265cdc1164603a9011173b1457c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b060ffb3ac4ecd95dcd85d4744dc2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 977.451274] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618649, 'name': Rename_Task, 'duration_secs': 0.29269} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.451564] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.451813] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5de77d8-ed07-43e6-b044-3fd2f332a4e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.463024] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 977.463024] env[70020]: value = "task-3618650" [ 977.463024] env[70020]: _type = "Task" [ 977.463024] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.476733] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618650, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.534540] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updated VIF entry in instance network info cache for port 52cf3b73-bbee-4e96-91f2-a1caa2041501. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.534847] env[70020]: DEBUG nova.network.neutron [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": null, "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.615670] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7311b37c-ff70-4e3a-aa08-a4a4fb10b514 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.622909] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "refresh_cache-a39731d2-0b9b-41fa-b9ac-f80193a26d20" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.628384] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d77bf17-0618-4e3e-9b1c-62c0be55784d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.662522] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a583cad-a652-4194-9416-a5a921de953d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.665377] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.665512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquired lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.665659] env[70020]: DEBUG nova.network.neutron [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 
tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.667401] env[70020]: DEBUG nova.network.neutron [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Successfully created port: a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.677768] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62c9fd9-d0b4-4aa4-9fe0-46736b90761a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.696356] env[70020]: DEBUG nova.compute.provider_tree [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.701547] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f3a51f-1805-d2f5-d3d8-3c2e119887f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010557} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.702517] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-777317f0-2329-4b1f-9da2-9e52882749a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.705531] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 977.711485] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 977.711485] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b08a9a-ef34-3aaa-a12c-d8c402e4075c" [ 977.711485] env[70020]: _type = "Task" [ 977.711485] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.719577] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b08a9a-ef34-3aaa-a12c-d8c402e4075c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.764437] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "3dedfa48-0839-462e-8c32-ba5252f07ac0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.764760] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.823032] env[70020]: DEBUG nova.compute.manager [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Received event network-vif-plugged-71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.823261] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] Acquiring lock "58dded95-033a-46d7-b02e-5b2f2551234c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.823621] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] Lock "58dded95-033a-46d7-b02e-5b2f2551234c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.823896] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] Lock "58dded95-033a-46d7-b02e-5b2f2551234c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.824283] env[70020]: DEBUG nova.compute.manager [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] No waiting events found dispatching network-vif-plugged-71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 977.824386] env[70020]: WARNING nova.compute.manager [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Received unexpected event network-vif-plugged-71a34572-9310-4b13-b628-322b0a2dcf71 for instance with vm_state building and task_state spawning. 
[ 977.824545] env[70020]: DEBUG nova.compute.manager [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Received event network-changed-71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.824726] env[70020]: DEBUG nova.compute.manager [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Refreshing instance network info cache due to event network-changed-71a34572-9310-4b13-b628-322b0a2dcf71. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 977.824820] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] Acquiring lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.970855] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618650, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.040686] env[70020]: DEBUG oslo_concurrency.lockutils [req-1dd4f138-03e9-4c54-81ab-927c6b2b6f42 req-bd97ac39-5bfe-4bc1-b32a-ec62dedf607d service nova] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.202021] env[70020]: DEBUG nova.network.neutron [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.205331] env[70020]: DEBUG nova.scheduler.client.report [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.223339] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b08a9a-ef34-3aaa-a12c-d8c402e4075c, 'name': SearchDatastore_Task, 'duration_secs': 0.009322} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.223506] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.223779] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.225032] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79faf0cc-d9d2-4837-99e0-04210c03f3d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.234040] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 978.234040] env[70020]: value = "task-3618651" [ 978.234040] env[70020]: _type = "Task" [ 978.234040] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.242165] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.267498] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 978.354350] env[70020]: DEBUG nova.network.neutron [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updating instance_info_cache with network_info: [{"id": "71a34572-9310-4b13-b628-322b0a2dcf71", "address": "fa:16:3e:a4:01:cc", "network": {"id": "cacbf8b2-6ef5-423e-8457-62a27dc26109", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-319430195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfcc2b6300e54620aee884920b416cd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a34572-93", "ovs_interfaceid": "71a34572-9310-4b13-b628-322b0a2dcf71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.471868] env[70020]: DEBUG oslo_vmware.api [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618650, 'name': PowerOnVM_Task, 'duration_secs': 0.929171} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.472115] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.472302] env[70020]: INFO nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Took 8.85 seconds to spawn the instance on the hypervisor. 
[ 978.472583] env[70020]: DEBUG nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 978.473826] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacd640c-bf84-4fe8-9354-2d5bd3f75c45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.714701] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 978.717712] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.024s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.720278] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.015s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.720515] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.722637] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.845s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.722856] env[70020]: DEBUG nova.objects.instance [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 978.749284] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485487} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.750809] env[70020]: INFO nova.scheduler.client.report [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Deleted allocations for instance 4335f92a-897a-4779-be70-4f0754a66d53 [ 978.755469] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 978.756628] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 978.763029] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 978.763029] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.763029] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 978.763029] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.764063] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 978.764063] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 978.764063] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 978.764178] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 978.764533] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 978.764730] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 978.765611] env[70020]: DEBUG nova.virt.hardware [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 978.767142] env[70020]: INFO nova.scheduler.client.report [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleted allocations for instance b53f55c1-1867-410c-9c53-f552ff30d697 [ 978.768715] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee4ec088-ae55-41be-b8e5-4f16bb65af12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.773425] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87adc9d-a0d8-4bf6-9e26-4cdb6dd15791 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.792414] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 978.792414] env[70020]: value = "task-3618652" [ 978.792414] env[70020]: _type = "Task" [ 978.792414] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.794535] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4203d8c1-9796-468b-8843-37ca65e3751e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.802386] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.819831] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.857137] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Releasing lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.857454] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Instance network_info: |[{"id": "71a34572-9310-4b13-b628-322b0a2dcf71", "address": "fa:16:3e:a4:01:cc", "network": {"id": "cacbf8b2-6ef5-423e-8457-62a27dc26109", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-319430195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfcc2b6300e54620aee884920b416cd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a34572-93", "ovs_interfaceid": "71a34572-9310-4b13-b628-322b0a2dcf71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 978.857735] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] Acquired lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.857915] env[70020]: DEBUG nova.network.neutron 
[req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Refreshing network info cache for port 71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.859038] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:01:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab0428e-1be7-475e-80e3-1f0aa08d4f86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71a34572-9310-4b13-b628-322b0a2dcf71', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.866601] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Creating folder: Project (bfcc2b6300e54620aee884920b416cd6). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 978.867482] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c2cbc02-c610-4f6c-b6a2-f84d75a54522 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.878727] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Created folder: Project (bfcc2b6300e54620aee884920b416cd6) in parent group-v721521. [ 978.878908] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Creating folder: Instances. Parent ref: group-v721754. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 978.879152] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ef304e6-0a0e-4855-8dd0-b2267bfbd819 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.888166] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Created folder: Instances in parent group-v721754. [ 978.888393] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 978.888565] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 978.888755] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6c4b80b-ff04-4773-89a9-fae803e36190 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.907540] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.907540] env[70020]: value = "task-3618655" [ 978.907540] env[70020]: _type = "Task" [ 978.907540] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.914965] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618655, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.994043] env[70020]: INFO nova.compute.manager [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Took 45.33 seconds to build instance. [ 979.167932] env[70020]: DEBUG nova.network.neutron [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Successfully updated port: a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.190393] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.190833] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36e62b67-ee27-4463-b181-16ba5171c994 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.199676] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 979.199676] env[70020]: value = "task-3618656" [ 979.199676] env[70020]: _type = "Task" [ 979.199676] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.213525] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618656, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.285022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-52468b9b-9f69-4190-b7fb-0a6185aa7dca tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "4335f92a-897a-4779-be70-4f0754a66d53" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.565s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.289254] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8d64c5e5-0b9b-445c-95f1-ee3f96483c58 tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "b53f55c1-1867-410c-9c53-f552ff30d697" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.051s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.302906] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064283} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.303086] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 979.304017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53121e8-f606-4de1-8e5c-731d080d7c7e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.309390] env[70020]: DEBUG nova.compute.manager [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Received event network-changed {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 979.309565] env[70020]: DEBUG nova.compute.manager [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Refreshing instance network info cache due to event network-changed. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 979.309770] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] Acquiring lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.309916] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] Acquired lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.310083] env[70020]: DEBUG nova.network.neutron [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.339879] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.340588] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adc0afbb-962b-44da-a9e1-dc99322e9073 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.362333] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 979.362333] env[70020]: value = "task-3618657" [ 979.362333] env[70020]: _type = "Task" [ 979.362333] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.372241] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.419725] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618655, 'name': CreateVM_Task, 'duration_secs': 0.395199} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.420130] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.421135] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.421446] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.421960] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 979.422672] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2513c98-8c05-4669-b154-8a0171c2e763 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.427976] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 979.427976] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5276851e-94ae-4d73-bc48-7c552326d26b" [ 979.427976] env[70020]: _type = "Task" [ 979.427976] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.440739] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5276851e-94ae-4d73-bc48-7c552326d26b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.496492] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bb04f738-433b-4f3f-935f-0e91cfc54d40 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.392s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.612093] env[70020]: DEBUG nova.network.neutron [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updated VIF entry in instance network info cache for port 71a34572-9310-4b13-b628-322b0a2dcf71. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.612093] env[70020]: DEBUG nova.network.neutron [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updating instance_info_cache with network_info: [{"id": "71a34572-9310-4b13-b628-322b0a2dcf71", "address": "fa:16:3e:a4:01:cc", "network": {"id": "cacbf8b2-6ef5-423e-8457-62a27dc26109", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-319430195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfcc2b6300e54620aee884920b416cd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a34572-93", "ovs_interfaceid": "71a34572-9310-4b13-b628-322b0a2dcf71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.303162] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-40fa0339-c221-4841-9444-dc957a95cf3b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.303550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-40fa0339-c221-4841-9444-dc957a95cf3b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.303550] env[70020]: DEBUG nova.network.neutron [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Building network info cache for instance 
{{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 980.305154] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b742e06-25b8-4d67-930c-a155f58081e8 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.582s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.308413] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.308627] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.308817] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.308993] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.309171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.310631] env[70020]: DEBUG oslo_concurrency.lockutils [req-4e2f264a-65ba-4293-8ea4-8e2d986935c9 req-3a901825-a14c-481f-80cf-62f4f4819ec6 service nova] Releasing lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.311017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "ef85421b-b679-4f38-b052-5695baa2e405" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.311210] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "ef85421b-b679-4f38-b052-5695baa2e405" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.311386] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "ef85421b-b679-4f38-b052-5695baa2e405-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.311556] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "ef85421b-b679-4f38-b052-5695baa2e405-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.311706] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "ef85421b-b679-4f38-b052-5695baa2e405-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.315984] env[70020]: DEBUG nova.compute.manager [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Received event network-vif-plugged-a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.316194] env[70020]: DEBUG oslo_concurrency.lockutils [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] Acquiring lock "40fa0339-c221-4841-9444-dc957a95cf3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.316381] env[70020]: DEBUG oslo_concurrency.lockutils [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] Lock "40fa0339-c221-4841-9444-dc957a95cf3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.316535] env[70020]: DEBUG oslo_concurrency.lockutils [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] Lock "40fa0339-c221-4841-9444-dc957a95cf3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.316693] 
env[70020]: DEBUG nova.compute.manager [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] No waiting events found dispatching network-vif-plugged-a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 980.316851] env[70020]: WARNING nova.compute.manager [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Received unexpected event network-vif-plugged-a805f6e6-6016-433a-b106-0e686f4bd6ef for instance with vm_state building and task_state spawning. [ 980.317015] env[70020]: DEBUG nova.compute.manager [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Received event network-changed-a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.317170] env[70020]: DEBUG nova.compute.manager [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Refreshing instance network info cache due to event network-changed-a805f6e6-6016-433a-b106-0e686f4bd6ef. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 980.317327] env[70020]: DEBUG oslo_concurrency.lockutils [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] Acquiring lock "refresh_cache-40fa0339-c221-4841-9444-dc957a95cf3b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.317738] env[70020]: INFO nova.compute.manager [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Terminating instance [ 980.319118] env[70020]: INFO nova.compute.manager [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Terminating instance [ 980.325379] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.199s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.326095] env[70020]: DEBUG nova.objects.instance [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'resources' on Instance uuid a8982c31-ea86-4a8d-b8c6-006263ef41f8 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.341398] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618656, 'name': PowerOffVM_Task, 'duration_secs': 0.413665} completed successfully. 
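Aside: every "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triplet in this capture is oslo.vmware's task poller at work. A hedged sketch of the driver-side call shape, assuming an oslo_vmware.api.VMwareAPISession; the host, credentials and VM reference are placeholders, and this will not connect to anything real:

    from oslo_vmware import api

    # Session creation mirrors the _create_session lines near the top of this capture.
    session = api.VMwareAPISession(
        "vc.example.test", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # ManagedObjectReference of the target VM, looked up elsewhere

    # invoke_api returns a vCenter task moref; wait_for_task polls it, emitting
    # the "progress is N%" DEBUG lines until the task finishes or raises.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)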
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.348644] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 980.349234] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5276851e-94ae-4d73-bc48-7c552326d26b, 'name': SearchDatastore_Task, 'duration_secs': 0.014583} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.349662] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618657, 'name': ReconfigVM_Task, 'duration_secs': 0.763033} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.350402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fbe150-2f6a-4d18-9b3c-9485d2a33468 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.352843] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.353081] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.353343] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.353489] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.353660] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c 
tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.353984] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 3a4f2342-58e7-436b-a779-0fa093b52409/3a4f2342-58e7-436b-a779-0fa093b52409.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.354701] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a30cf497-4c9c-4787-9235-a305bf254a4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.356300] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbe69498-999f-4b39-ba9e-be1d6c0c8536 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.373665] env[70020]: DEBUG nova.network.neutron [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 980.377793] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbc7f6c-9a78-44e9-b6e7-409f95afb918 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.380500] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 980.380500] env[70020]: value = "task-3618658" [ 980.380500] env[70020]: _type = "Task" [ 980.380500] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.380717] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.380879] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.381912] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-708f6c02-7dbe-4f4b-9f86-09e1e3e75030 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.395092] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 980.395092] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52972a7a-ef82-870d-2fab-3aa3786d76f2" [ 980.395092] env[70020]: _type = "Task" [ 980.395092] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.400985] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618658, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.408478] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52972a7a-ef82-870d-2fab-3aa3786d76f2, 'name': SearchDatastore_Task, 'duration_secs': 0.009612} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.409343] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a3ec82d-49f5-41fe-a06b-28623583d0a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.414309] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 980.414309] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524d75ea-19b6-5ea8-2dd2-7b7f539c0f04" [ 980.414309] env[70020]: _type = "Task" [ 980.414309] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.426560] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524d75ea-19b6-5ea8-2dd2-7b7f539c0f04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.428448] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.428725] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0be9b806-be05-4ce6-a5be-573a37e34d81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.434126] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 980.434126] env[70020]: value = "task-3618659" [ 980.434126] env[70020]: _type = "Task" [ 980.434126] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.443302] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618659, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.607167] env[70020]: DEBUG nova.network.neutron [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Updating instance_info_cache with network_info: [{"id": "a805f6e6-6016-433a-b106-0e686f4bd6ef", "address": "fa:16:3e:10:be:77", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa805f6e6-60", "ovs_interfaceid": "a805f6e6-6016-433a-b106-0e686f4bd6ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.645895] env[70020]: DEBUG nova.network.neutron [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Updating instance_info_cache with network_info: [{"id": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "address": "fa:16:3e:c0:c4:f9", "network": {"id": "c5fc5c47-102e-4a22-a58e-e111e6f89ead", 
"bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8f53857c59164417b433ba5cd10274ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0131ef-1d", "ovs_interfaceid": "7e0131ef-1dbf-4927-bc49-8bcea6a75d84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.822878] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "4b5750d4-98ec-4c70-b214-fad97060b606" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.823505] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.823505] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.823652] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.823848] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.826251] env[70020]: INFO nova.compute.manager [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] 
[instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Terminating instance [ 980.835096] env[70020]: DEBUG nova.objects.instance [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'numa_topology' on Instance uuid a8982c31-ea86-4a8d-b8c6-006263ef41f8 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.836216] env[70020]: DEBUG nova.compute.manager [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 980.836446] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 980.837351] env[70020]: DEBUG nova.compute.manager [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 980.837537] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 980.838322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722f1f22-61c9-4675-a3c5-251f386715f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.842035] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656194d2-3454-45de-b8a0-0c22cb9fa4dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.850340] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.852191] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3354c97-3183-456e-b061-5ae38ff10ac6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.853617] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.853856] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-784fef40-c10a-430f-a8f0-598464058098 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.860341] env[70020]: DEBUG oslo_vmware.api [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 980.860341] env[70020]: value = "task-3618661" [ 980.860341] env[70020]: _type = "Task" [ 980.860341] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.861154] env[70020]: DEBUG oslo_vmware.api [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 980.861154] env[70020]: value = "task-3618660" [ 980.861154] env[70020]: _type = "Task" [ 980.861154] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.873585] env[70020]: DEBUG oslo_vmware.api [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618660, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.876848] env[70020]: DEBUG oslo_vmware.api [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.890578] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618658, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.924607] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524d75ea-19b6-5ea8-2dd2-7b7f539c0f04, 'name': SearchDatastore_Task, 'duration_secs': 0.008616} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.924898] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.925191] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 58dded95-033a-46d7-b02e-5b2f2551234c/58dded95-033a-46d7-b02e-5b2f2551234c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.925447] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af394a35-c965-48c8-813d-f92f634111ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.931157] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 980.931157] env[70020]: value = "task-3618662" [ 980.931157] env[70020]: _type = "Task" [ 980.931157] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.940495] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618662, 'name': CopyVirtualDisk_Task} progress is 0%. 
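Aside: the CopyVirtualDisk_Task above is the cache-hit path of the VMDK image cache. The base image c9cd83bf... already sits under devstack-image-cache_base, so the driver only has to clone it into the instance directory (a cache miss would first stream the image from Glance, which is why the cache directory and a per-image lock are taken around this step). A sketch of the clone call against the same kind of session object as in the earlier example, not Nova's vm_util code; the datastore paths and dc_ref are placeholders:

    def clone_cached_disk(session, dc_ref, source_path, dest_path):
        # CopyVirtualDisk_Task is issued against the vCenter virtualDiskManager, e.g.
        #   source_path = "[datastore2] devstack-image-cache_base/<image>/<image>.vmdk"
        #   dest_path   = "[datastore2] <instance_uuid>/<instance_uuid>.vmdk"
        task = session.invoke_api(
            session.vim, "CopyVirtualDisk_Task",
            session.vim.service_content.virtualDiskManager,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        session.wait_for_task(task)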
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.947036] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 980.947286] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.947734] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.947734] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.947884] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.948151] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9099b3a6-22f3-4d0e-aae7-97149e85575f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.965467] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.965716] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.966502] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac66bc91-c79d-48a3-85d7-3e626d99a347 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.972086] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 980.972086] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bc796b-004f-4b73-53ac-e8ca757c8c5c" [ 980.972086] env[70020]: _type = "Task" [ 980.972086] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.980959] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bc796b-004f-4b73-53ac-e8ca757c8c5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.112026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-40fa0339-c221-4841-9444-dc957a95cf3b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.112026] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance network_info: |[{"id": "a805f6e6-6016-433a-b106-0e686f4bd6ef", "address": "fa:16:3e:10:be:77", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa805f6e6-60", "ovs_interfaceid": "a805f6e6-6016-433a-b106-0e686f4bd6ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 981.112026] env[70020]: DEBUG oslo_concurrency.lockutils [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] Acquired lock "refresh_cache-40fa0339-c221-4841-9444-dc957a95cf3b" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.112026] env[70020]: DEBUG nova.network.neutron [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Refreshing network info cache for port a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 981.112881] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:be:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a805f6e6-6016-433a-b106-0e686f4bd6ef', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.122630] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 981.126136] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 981.130021] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47081c09-ad31-4d7b-a324-db5ec4e3d83e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.148145] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b36b2519-8704-4952-932a-056f875bb562 tempest-ServerExternalEventsTest-804580838 tempest-ServerExternalEventsTest-804580838-project] Releasing lock "refresh_cache-563512c2-b80f-4f14-add5-d48e2b7a0ee9" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.149738] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.149738] env[70020]: value = "task-3618663" [ 981.149738] env[70020]: _type = "Task" [ 981.149738] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.161904] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618663, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.333036] env[70020]: DEBUG nova.compute.manager [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 981.333036] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.333843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60bc5da-902d-4bde-b4a4-2e9c20431ec2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.337363] env[70020]: DEBUG nova.objects.base [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 981.348366] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.349998] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3567fb18-376c-4659-ad5d-b7e1053fd664 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.359742] env[70020]: DEBUG oslo_vmware.api [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 981.359742] env[70020]: value = "task-3618664" [ 981.359742] env[70020]: _type = "Task" [ 981.359742] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.375986] env[70020]: DEBUG nova.network.neutron [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Updated VIF entry in instance network info cache for port a805f6e6-6016-433a-b106-0e686f4bd6ef. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.376408] env[70020]: DEBUG nova.network.neutron [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Updating instance_info_cache with network_info: [{"id": "a805f6e6-6016-433a-b106-0e686f4bd6ef", "address": "fa:16:3e:10:be:77", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa805f6e6-60", "ovs_interfaceid": "a805f6e6-6016-433a-b106-0e686f4bd6ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.383241] env[70020]: DEBUG oslo_vmware.api [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.392926] env[70020]: DEBUG oslo_vmware.api [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618661, 'name': PowerOffVM_Task, 'duration_secs': 0.260943} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.393209] env[70020]: DEBUG oslo_vmware.api [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618660, 'name': PowerOffVM_Task, 'duration_secs': 0.195074} completed successfully. 
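Aside: the large instance_info_cache blobs are the most useful payloads in this capture when chasing wiring problems; they carry the port UUID, MAC, fixed IPs, MTU and the NSX segment each VIF is bound to, and they are printed as JSON. A small stdlib-only helper for pulling them out of a line, assuming the "network_info: [...]" layout seen above (wrapped lines would need to be rejoined first):

    import json
    import re

    def vifs_from_log_line(line):
        """Extract (port_id, mac, fixed_ips) tuples from an
        'Updating instance_info_cache with network_info: [...]' log line."""
        match = re.search(r"network_info: (\[.*\])", line)
        if not match:
            return []
        network_info = json.loads(match.group(1))
        return [(vif["id"],
                 vif["address"],
                 [ip["address"]
                  for subnet in vif["network"]["subnets"]
                  for ip in subnet["ips"]])
                for vif in network_info]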
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.397586] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.397872] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.398233] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.398464] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.400191] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-604f5fec-ce94-427e-8602-1853bd884d18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.402380] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c4fea9d-e955-47b7-b5da-6edf26a68647 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.409813] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618658, 'name': Rename_Task, 'duration_secs': 0.746455} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.413932] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.414866] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02916af1-1e54-4330-9f0e-cce60b773d94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.421364] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 981.421364] env[70020]: value = "task-3618667" [ 981.421364] env[70020]: _type = "Task" [ 981.421364] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.433201] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618667, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.441892] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618662, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.480555] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.480900] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.481175] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Deleting the datastore file [datastore2] 563512c2-b80f-4f14-add5-d48e2b7a0ee9 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.485545] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7275c84a-3c62-4476-b884-4f0f898b2ffb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.487484] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bc796b-004f-4b73-53ac-e8ca757c8c5c, 'name': SearchDatastore_Task, 'duration_secs': 0.011616} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.491332] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f9e8838-808d-4024-b349-6693aa88625b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.495883] env[70020]: DEBUG oslo_vmware.api [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for the task: (returnval){ [ 981.495883] env[70020]: value = "task-3618668" [ 981.495883] env[70020]: _type = "Task" [ 981.495883] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.505419] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.505655] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.505848] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Deleting the datastore file [datastore1] ef85421b-b679-4f38-b052-5695baa2e405 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.506207] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 981.506207] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5217d010-1e50-f2bd-0219-279bff83bf37" [ 981.506207] env[70020]: _type = "Task" [ 981.506207] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.509387] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a94fd99e-a5e3-4936-86a2-e72de82ed967 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.511439] env[70020]: DEBUG oslo_vmware.api [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.521288] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5217d010-1e50-f2bd-0219-279bff83bf37, 'name': SearchDatastore_Task, 'duration_secs': 0.014189} completed successfully. 
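Aside: three of the requests above are tearing instances down, and the same vCenter sequence repeats for each: power the VM off, unregister it, then delete its datastore directory. A hedged sketch of that sequence (not Nova's vmops implementation), reusing the session pattern from the earlier examples; vm_ref, ds_path and dc_ref are placeholders for the VM moref, a "[datastore2] <uuid>" path and the datacenter:

    def destroy_vm(session, vm_ref, ds_path, dc_ref):
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)                                # "Powered off the VM"
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)    # "Unregistering the VM"
        task = session.invoke_api(
            session.vim, "DeleteDatastoreFile_Task",
            session.vim.service_content.fileManager,
            name=ds_path, datacenter=dc_ref)                       # "Deleting the datastore file"
        session.wait_for_task(task)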
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.522591] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.522882] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. {{(pid=70020) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 981.523459] env[70020]: DEBUG oslo_vmware.api [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for the task: (returnval){ [ 981.523459] env[70020]: value = "task-3618669" [ 981.523459] env[70020]: _type = "Task" [ 981.523459] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.523459] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-beff4dbb-5970-4a79-ab01-f89b6af77c45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.537933] env[70020]: DEBUG oslo_vmware.api [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618669, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.538287] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 981.538287] env[70020]: value = "task-3618670" [ 981.538287] env[70020]: _type = "Task" [ 981.538287] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.549351] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618670, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.663938] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618663, 'name': CreateVM_Task, 'duration_secs': 0.400488} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.664235] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.664992] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.665222] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.665541] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 981.665821] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56367bc8-2460-4586-ab40-c566c75fbbb1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.670351] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 981.670351] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b0c32b-c253-67e8-6c39-9b6418e02bdf" [ 981.670351] env[70020]: _type = "Task" [ 981.670351] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.682557] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b0c32b-c253-67e8-6c39-9b6418e02bdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.725479] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e2c218-3c0b-4950-87ef-9da0574e07fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.733885] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2459b39e-d7da-4727-b2e2-a70284eea493 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.767458] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3214dc39-4ed5-4e5f-be6a-3e007d7af0e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.776309] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22febfcd-d2d0-4b02-aa52-6ae75927a8f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.791339] env[70020]: DEBUG nova.compute.provider_tree [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.871517] env[70020]: DEBUG oslo_vmware.api [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618664, 'name': PowerOffVM_Task, 'duration_secs': 0.265815} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.871942] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.872200] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.872485] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01c0eb31-a190-4884-bb84-e93e7669e2bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.884339] env[70020]: DEBUG oslo_concurrency.lockutils [req-1f57b023-f7af-4afa-aa34-f96555539704 req-0aba4fb0-4686-4dd3-a143-bdc7615716e2 service nova] Releasing lock "refresh_cache-40fa0339-c221-4841-9444-dc957a95cf3b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.932913] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618667, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.942544] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555403} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.942837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 58dded95-033a-46d7-b02e-5b2f2551234c/58dded95-033a-46d7-b02e-5b2f2551234c.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.944287] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.944287] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8077e171-e786-4ca0-9799-713f9bc85fa7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.951473] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 981.951473] env[70020]: value = "task-3618672" [ 981.951473] env[70020]: _type = "Task" [ 981.951473] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.959832] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618672, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.982344] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.983056] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.983266] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleting the datastore file [datastore2] 4b5750d4-98ec-4c70-b214-fad97060b606 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.983540] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37dd6371-b10f-4ec1-a02a-0da6b1eaf441 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.990205] env[70020]: DEBUG oslo_vmware.api [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for the task: (returnval){ [ 981.990205] env[70020]: value = "task-3618673" [ 981.990205] env[70020]: _type = "Task" [ 981.990205] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.999098] env[70020]: DEBUG oslo_vmware.api [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.007149] env[70020]: DEBUG oslo_vmware.api [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Task: {'id': task-3618668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164633} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.007511] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.007707] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.007919] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.008126] env[70020]: INFO nova.compute.manager [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 982.008424] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.008557] env[70020]: DEBUG nova.compute.manager [-] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.008652] env[70020]: DEBUG nova.network.neutron [-] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.036408] env[70020]: DEBUG oslo_vmware.api [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Task: {'id': task-3618669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165149} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.036663] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.036841] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.037434] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.037434] env[70020]: INFO nova.compute.manager [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Took 1.20 seconds to destroy the instance on the hypervisor. [ 982.037555] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.037650] env[70020]: DEBUG nova.compute.manager [-] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.037725] env[70020]: DEBUG nova.network.neutron [-] [instance: ef85421b-b679-4f38-b052-5695baa2e405] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.048322] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618670, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468889} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.048848] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. 
[ 982.049395] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30204c62-2565-4edb-ac20-30548798771e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.075027] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.075364] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f54628d9-5cfd-4bb6-b0cc-205d96fe7709 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.094941] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 982.094941] env[70020]: value = "task-3618674" [ 982.094941] env[70020]: _type = "Task" [ 982.094941] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.103108] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618674, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.180630] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b0c32b-c253-67e8-6c39-9b6418e02bdf, 'name': SearchDatastore_Task, 'duration_secs': 0.010003} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.180930] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.181189] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.181420] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.181556] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.181722] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.181977] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef2df0b7-1156-4c18-8bab-eed8e96933da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.192501] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.192668] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.193401] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61a13018-9f4f-4732-9f69-df2eb9b057cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.198397] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 982.198397] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528b727f-d75b-beb9-f862-dfc5a1f18538" [ 982.198397] env[70020]: _type = "Task" [ 982.198397] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.205763] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528b727f-d75b-beb9-f862-dfc5a1f18538, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.295112] env[70020]: DEBUG nova.scheduler.client.report [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.435030] env[70020]: DEBUG oslo_vmware.api [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618667, 'name': PowerOnVM_Task, 'duration_secs': 0.685802} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.435030] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.435030] env[70020]: DEBUG nova.compute.manager [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.435030] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654a14b8-d754-4f22-9e92-91d673018c00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.461328] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18359} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.461634] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.463436] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37125ba3-139f-4498-ba43-71e179247774 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.488580] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 58dded95-033a-46d7-b02e-5b2f2551234c/58dded95-033a-46d7-b02e-5b2f2551234c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.489887] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60873888-57fc-45c6-8729-9c6e6aab4c1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.517043] env[70020]: DEBUG oslo_vmware.api [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Task: {'id': task-3618673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.499144} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.518592] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.518592] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.518696] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.518784] env[70020]: INFO nova.compute.manager [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Took 1.19 seconds to destroy the instance on the hypervisor. [ 982.519022] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.519987] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 982.519987] env[70020]: value = "task-3618675" [ 982.519987] env[70020]: _type = "Task" [ 982.519987] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.519987] env[70020]: DEBUG nova.compute.manager [-] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.519987] env[70020]: DEBUG nova.network.neutron [-] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.528034] env[70020]: DEBUG nova.compute.manager [req-51f1ae71-cf32-40b9-8822-61ea246631be req-7477bb62-dccf-4120-8d1d-edda1b34aa50 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Received event network-vif-deleted-826e6050-1881-4e29-a740-868fa0f44788 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 982.528034] env[70020]: INFO nova.compute.manager [req-51f1ae71-cf32-40b9-8822-61ea246631be req-7477bb62-dccf-4120-8d1d-edda1b34aa50 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Neutron deleted interface 826e6050-1881-4e29-a740-868fa0f44788; detaching it from the instance and deleting it from the info cache [ 982.528159] env[70020]: DEBUG nova.network.neutron [req-51f1ae71-cf32-40b9-8822-61ea246631be req-7477bb62-dccf-4120-8d1d-edda1b34aa50 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.532703] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618675, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.605419] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.708676] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528b727f-d75b-beb9-f862-dfc5a1f18538, 'name': SearchDatastore_Task, 'duration_secs': 0.017691} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.709524] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63c33697-21c1-41d1-b3ba-a900083386d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.714578] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 982.714578] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524957cb-8ab6-0ec7-9945-019ab7a67fa7" [ 982.714578] env[70020]: _type = "Task" [ 982.714578] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.721979] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524957cb-8ab6-0ec7-9945-019ab7a67fa7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.799964] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.475s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.802463] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.132s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.802673] env[70020]: DEBUG nova.objects.instance [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'resources' on Instance uuid 9dec24d6-af8a-41b9-920c-e4420fc69417 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.906014] env[70020]: DEBUG nova.network.neutron [-] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.920638] env[70020]: DEBUG nova.network.neutron [-] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.952731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.030904] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618675, 'name': ReconfigVM_Task, 'duration_secs': 0.275383} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.031176] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 58dded95-033a-46d7-b02e-5b2f2551234c/58dded95-033a-46d7-b02e-5b2f2551234c.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.031772] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92205627-2289-433c-a51f-7cc327e81546 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.033458] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11639e72-e5c6-467f-a601-b5be32297549 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.041065] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9747b7fd-7805-4e7a-8d4b-56cf5e079666 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.052262] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 983.052262] env[70020]: value = "task-3618676" [ 983.052262] env[70020]: _type = "Task" [ 983.052262] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.073915] env[70020]: DEBUG nova.compute.manager [req-51f1ae71-cf32-40b9-8822-61ea246631be req-7477bb62-dccf-4120-8d1d-edda1b34aa50 service nova] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Detach interface failed, port_id=826e6050-1881-4e29-a740-868fa0f44788, reason: Instance ef85421b-b679-4f38-b052-5695baa2e405 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 983.106570] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.226952] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524957cb-8ab6-0ec7-9945-019ab7a67fa7, 'name': SearchDatastore_Task, 'duration_secs': 0.009372} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.227234] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.227486] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 983.227764] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-211d8c48-6d59-4d09-8ea1-746187e70e87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.233772] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 983.233772] env[70020]: value = "task-3618677" [ 983.233772] env[70020]: _type = "Task" [ 983.233772] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.237466] env[70020]: DEBUG nova.network.neutron [-] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.243908] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618677, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.313148] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4a09c1f7-0e9e-4287-b9ef-0c02737bb2d2 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 55.027s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.314024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 33.412s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.314232] env[70020]: DEBUG oslo_concurrency.lockutils [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.314430] env[70020]: DEBUG oslo_concurrency.lockutils [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.314590] env[70020]: DEBUG oslo_concurrency.lockutils [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.316347] env[70020]: INFO nova.compute.manager [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Terminating instance [ 983.408941] env[70020]: INFO nova.compute.manager [-] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Took 1.37 seconds to deallocate network for instance. [ 983.423444] env[70020]: INFO nova.compute.manager [-] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Took 1.41 seconds to deallocate network for instance. [ 983.562271] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618676, 'name': Rename_Task, 'duration_secs': 0.156214} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.565135] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.565952] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f893d70c-1f7c-4f93-8cd3-8f532b0a2247 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.573734] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 983.573734] env[70020]: value = "task-3618678" [ 983.573734] env[70020]: _type = "Task" [ 983.573734] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.584747] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.609740] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618674, 'name': ReconfigVM_Task, 'duration_secs': 1.05474} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.610063] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Reconfigured VM instance instance-00000054 to attach disk [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.611100] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efae7e8-6fc3-4a3e-80f7-f0da24fb5d22 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.640160] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d886ace-b9dd-42d9-baad-1ab149b91906 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.660030] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 983.660030] env[70020]: value = "task-3618679" [ 983.660030] env[70020]: _type = "Task" [ 983.660030] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.671506] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618679, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.697796] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c129430a-6dc9-4bd9-aecc-c7042424cf17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.705386] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac01312-779a-434d-add3-f60a13fa76cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.739815] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd76e58a-a1e2-4cc2-a2ca-a14e13d9eeb7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.742439] env[70020]: INFO nova.compute.manager [-] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Took 1.22 seconds to deallocate network for instance. [ 983.753095] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462095} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.754401] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16ea349-dfa0-4dcc-a8c2-9bdb363d516e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.758490] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.758785] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.758994] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02282144-0afc-4fe7-832b-33d9643c0987 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.771591] env[70020]: DEBUG nova.compute.provider_tree [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: 
ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.774566] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 983.774566] env[70020]: value = "task-3618680" [ 983.774566] env[70020]: _type = "Task" [ 983.774566] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.783404] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618680, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.821083] env[70020]: DEBUG nova.compute.manager [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 983.821083] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.821274] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b541e9cf-d576-4342-9289-57dfd7f2fcd9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.831743] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab3c3e3-867c-4244-876b-1d03f65e975c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.864950] env[70020]: WARNING nova.virt.vmwareapi.vmops [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a8982c31-ea86-4a8d-b8c6-006263ef41f8 could not be found. [ 983.865275] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 983.865510] env[70020]: INFO nova.compute.manager [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 983.865824] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 983.866120] env[70020]: DEBUG nova.compute.manager [-] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 983.866250] env[70020]: DEBUG nova.network.neutron [-] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.919029] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.932368] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.083688] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618678, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.170685] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618679, 'name': ReconfigVM_Task, 'duration_secs': 0.353619} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.170962] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.171243] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-449aad96-21b8-40dd-9af5-40cc2f209e96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.178089] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 984.178089] env[70020]: value = "task-3618681" [ 984.178089] env[70020]: _type = "Task" [ 984.178089] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.186031] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618681, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.256246] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.276544] env[70020]: DEBUG nova.scheduler.client.report [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.289687] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618680, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080215} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.289952] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.290734] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd02d51-de8b-4070-bacb-747786765471 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.313727] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.314666] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dc3ccd6-0303-46f3-97bd-61a25b785d2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.334865] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 984.334865] env[70020]: value = "task-3618682" [ 984.334865] env[70020]: _type = "Task" [ 984.334865] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.342721] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.723801] env[70020]: DEBUG nova.compute.manager [req-3105a758-e59c-480d-a296-46d616780ac7 req-38ac8475-9115-4177-9d69-93726160808b service nova] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Received event network-vif-deleted-7e0131ef-1dbf-4927-bc49-8bcea6a75d84 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 984.723801] env[70020]: DEBUG nova.compute.manager [req-3105a758-e59c-480d-a296-46d616780ac7 req-38ac8475-9115-4177-9d69-93726160808b service nova] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Received event network-vif-deleted-b1d9f41a-978e-4fe2-bb42-1a2ab68ce1b2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 984.723801] env[70020]: DEBUG oslo_vmware.api [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618678, 'name': PowerOnVM_Task, 'duration_secs': 0.69548} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.723801] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.723801] env[70020]: INFO nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Took 8.42 seconds to spawn the instance on the hypervisor. [ 984.723801] env[70020]: DEBUG nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 984.723801] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5575198-436d-4ed9-af89-b94123ba95c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.723801] env[70020]: DEBUG nova.network.neutron [-] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.723801] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618681, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.785351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.983s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.788410] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.047s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.788696] env[70020]: DEBUG nova.objects.instance [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lazy-loading 'resources' on Instance uuid ff4e958d-0068-429f-af76-5e7d4dd147f3 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.811291] env[70020]: INFO nova.scheduler.client.report [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance 9dec24d6-af8a-41b9-920c-e4420fc69417 [ 984.846134] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618682, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.107914] env[70020]: INFO nova.compute.manager [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Took 50.57 seconds to build instance. [ 985.109160] env[70020]: INFO nova.compute.manager [-] [instance: a8982c31-ea86-4a8d-b8c6-006263ef41f8] Took 1.24 seconds to deallocate network for instance. [ 985.189739] env[70020]: DEBUG oslo_vmware.api [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618681, 'name': PowerOnVM_Task, 'duration_secs': 0.679992} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.190044] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.192693] env[70020]: DEBUG nova.compute.manager [None req-33720f7c-9c4c-49a8-abb3-ad2eedefd3a5 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.193550] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f50171-7213-46d8-b2d7-a933fe942fc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.321577] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3be2e0-ed2d-4f12-a045-7a7827de5c8a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9dec24d6-af8a-41b9-920c-e4420fc69417" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.089s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.346364] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618682, 'name': ReconfigVM_Task, 'duration_secs': 0.636903} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.348988] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.350566] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e35adfdf-3cd3-466a-a004-a417805dcda4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.358989] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 985.358989] env[70020]: value = "task-3618683" [ 985.358989] env[70020]: _type = "Task" [ 985.358989] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.370583] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618683, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.581620] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc5b21c-e2f4-4268-af65-36e06fe2abc5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.588793] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b679072-009b-4dbc-bb8c-a8b3779989a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.625019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-867eda26-e686-4ea5-a2b1-9a1a961f8d0c tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "58dded95-033a-46d7-b02e-5b2f2551234c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.186s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.629837] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6183ce-26fc-4557-9722-b894f83a3705 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.639714] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979dbfb0-7528-4ba1-b3d6-c7b0e58279a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.656077] env[70020]: DEBUG nova.compute.provider_tree [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.868929] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618683, 'name': Rename_Task, 'duration_secs': 0.43795} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.869363] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.869660] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5223817-b373-49f0-8c6a-769f8c42dc75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.875917] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 985.875917] env[70020]: value = "task-3618684" [ 985.875917] env[70020]: _type = "Task" [ 985.875917] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.889059] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618684, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.150663] env[70020]: DEBUG oslo_concurrency.lockutils [None req-060e6fb1-df52-4899-b811-6676d6208196 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "a8982c31-ea86-4a8d-b8c6-006263ef41f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.837s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.164020] env[70020]: DEBUG nova.scheduler.client.report [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.387231] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618684, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.549932] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.549932] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.667084] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.670165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.421s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.670549] env[70020]: DEBUG nova.objects.instance [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lazy-loading 'resources' on Instance uuid ea97f6ab-057e-44d3-835a-68b46d241621 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.695228] env[70020]: INFO nova.scheduler.client.report [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Deleted allocations for instance ff4e958d-0068-429f-af76-5e7d4dd147f3 [ 986.756883] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.757366] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.887273] env[70020]: DEBUG oslo_vmware.api [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618684, 'name': PowerOnVM_Task, 'duration_secs': 0.816853} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.887854] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.888628] env[70020]: INFO nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Took 8.17 seconds to spawn the instance on the hypervisor. [ 986.889260] env[70020]: DEBUG nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.890099] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cf65e0-b9fb-40d0-90c0-cd5a66e24164 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.057105] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.057304] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.057519] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.057617] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.057743] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.057887] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.058038] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 987.058189] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.206117] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a338c88-ef3c-4c9f-89ea-0e919b6b9747 tempest-ServersListShow298Test-1920415701 tempest-ServersListShow298Test-1920415701-project-member] Lock "ff4e958d-0068-429f-af76-5e7d4dd147f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.895s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.259294] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 987.343918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "056141e3-5628-4451-bd25-f4fa15edd11e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.344204] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.412657] env[70020]: INFO nova.compute.manager [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Took 51.24 seconds to build instance. [ 987.547608] env[70020]: DEBUG nova.compute.manager [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Received event network-changed-71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 987.547768] env[70020]: DEBUG nova.compute.manager [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Refreshing instance network info cache due to event network-changed-71a34572-9310-4b13-b628-322b0a2dcf71. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 987.547949] env[70020]: DEBUG oslo_concurrency.lockutils [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] Acquiring lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.548233] env[70020]: DEBUG oslo_concurrency.lockutils [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] Acquired lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.548413] env[70020]: DEBUG nova.network.neutron [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Refreshing network info cache for port 71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.563742] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.581303] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adc2cf8-12a5-4ce2-afd5-c3f48f465ba0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.590448] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75447f0d-1e98-4cca-9d24-8be84619c792 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.623402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3d482c-50f4-451c-96d4-c6e571c41e93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.632309] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38eff3a4-fa30-472e-bb0a-7aef4df574da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.648182] env[70020]: DEBUG nova.compute.provider_tree [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.745135] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "0453722d-258f-49e3-b61e-f1081eb465c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.745346] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock 
"0453722d-258f-49e3-b61e-f1081eb465c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.791775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.848396] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 987.915292] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fdd2161a-597c-4591-b3fa-23d1cbc00384 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.859s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.152398] env[70020]: DEBUG nova.scheduler.client.report [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.247974] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 988.288135] env[70020]: DEBUG nova.network.neutron [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updated VIF entry in instance network info cache for port 71a34572-9310-4b13-b628-322b0a2dcf71. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.288771] env[70020]: DEBUG nova.network.neutron [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updating instance_info_cache with network_info: [{"id": "71a34572-9310-4b13-b628-322b0a2dcf71", "address": "fa:16:3e:a4:01:cc", "network": {"id": "cacbf8b2-6ef5-423e-8457-62a27dc26109", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-319430195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfcc2b6300e54620aee884920b416cd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a34572-93", "ovs_interfaceid": "71a34572-9310-4b13-b628-322b0a2dcf71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.373165] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.401969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "40fa0339-c221-4841-9444-dc957a95cf3b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.402303] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.402496] env[70020]: DEBUG nova.compute.manager [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 988.403432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d8aabc-2e61-4794-ae9c-c5fca56a5d3e {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.410490] env[70020]: DEBUG nova.compute.manager [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 988.411046] env[70020]: DEBUG nova.objects.instance [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'flavor' on Instance uuid 40fa0339-c221-4841-9444-dc957a95cf3b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.659304] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.988s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.660661] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.570s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.661297] env[70020]: DEBUG nova.objects.instance [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lazy-loading 'resources' on Instance uuid abc194e3-fb6a-4f2a-8886-e2777530a2a3 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.679925] env[70020]: INFO nova.scheduler.client.report [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Deleted allocations for instance ea97f6ab-057e-44d3-835a-68b46d241621 [ 988.773846] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.792871] env[70020]: DEBUG oslo_concurrency.lockutils [req-504e1d3b-771c-410e-9b1c-d80380977240 req-47e39c86-887f-4a0c-a074-01ff520ab8c3 service nova] Releasing lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.190688] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2beff42b-96e6-4cf4-a9a7-dd1edd86c239 tempest-ServersAdminTestJSON-1183835690 tempest-ServersAdminTestJSON-1183835690-project-member] Lock "ea97f6ab-057e-44d3-835a-68b46d241621" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.518s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.417624] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.417771] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8437f967-2a88-4c0c-85fa-cb385ab5d506 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.424591] env[70020]: DEBUG oslo_vmware.api [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 989.424591] env[70020]: value = "task-3618685" [ 989.424591] env[70020]: _type = "Task" [ 989.424591] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.435366] env[70020]: DEBUG oslo_vmware.api [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.512141] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c984d9f9-9346-45b7-a36f-478505be7ac8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.519261] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea84c6b9-4c3c-49d2-b5b6-bc4a3de6c7d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.548833] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d7ad7e-becd-4280-babb-0f150b491455 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.556396] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff473728-2794-4320-8454-c7344955db95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.572317] env[70020]: DEBUG nova.compute.provider_tree [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.936224] env[70020]: DEBUG oslo_vmware.api [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618685, 'name': PowerOffVM_Task, 'duration_secs': 0.228825} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.936593] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.936795] env[70020]: DEBUG nova.compute.manager [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.937603] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af35ee46-6aa0-466b-8ae4-8389a55a1f00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.026094] env[70020]: DEBUG nova.compute.manager [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Received event network-changed-71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.026304] env[70020]: DEBUG nova.compute.manager [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Refreshing instance network info cache due to event network-changed-71a34572-9310-4b13-b628-322b0a2dcf71. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 990.026513] env[70020]: DEBUG oslo_concurrency.lockutils [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] Acquiring lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.026793] env[70020]: DEBUG oslo_concurrency.lockutils [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] Acquired lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.027174] env[70020]: DEBUG nova.network.neutron [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Refreshing network info cache for port 71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.075015] env[70020]: DEBUG nova.scheduler.client.report [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.449289] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b74850cd-2921-42fc-8e07-81e536e1e37c tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.583188] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.587046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.389s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.587586] env[70020]: DEBUG nova.objects.instance [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lazy-loading 'resources' on Instance uuid edef9245-4048-4ea4-90cc-ebed54498d88 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.607320] env[70020]: INFO nova.scheduler.client.report [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Deleted allocations for instance abc194e3-fb6a-4f2a-8886-e2777530a2a3 [ 991.057777] env[70020]: DEBUG nova.network.neutron [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updated VIF entry in instance network info cache for port 71a34572-9310-4b13-b628-322b0a2dcf71. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.058708] env[70020]: DEBUG nova.network.neutron [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updating instance_info_cache with network_info: [{"id": "71a34572-9310-4b13-b628-322b0a2dcf71", "address": "fa:16:3e:a4:01:cc", "network": {"id": "cacbf8b2-6ef5-423e-8457-62a27dc26109", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-319430195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfcc2b6300e54620aee884920b416cd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab0428e-1be7-475e-80e3-1f0aa08d4f86", "external-id": "nsx-vlan-transportzone-976", "segmentation_id": 976, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a34572-93", "ovs_interfaceid": "71a34572-9310-4b13-b628-322b0a2dcf71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.125989] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3934d595-d839-4960-9b8a-34524dd8de84 tempest-ServersNegativeTestMultiTenantJSON-14989621 tempest-ServersNegativeTestMultiTenantJSON-14989621-project-member] Lock "abc194e3-fb6a-4f2a-8886-e2777530a2a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.440s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.480753] env[70020]: INFO nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Rebuilding instance [ 991.481981] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5c39f8-30c9-464d-979f-eecc35d44210 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.491734] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8c59f6-e99d-4371-85a9-1536dc9fb661 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.530137] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4247b3-03d7-47f9-b7c3-2e5b4767613f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.538115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c80da0f-695c-4d9e-931e-0a06aae54939 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.555373] env[70020]: DEBUG nova.compute.provider_tree [None 
req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.562547] env[70020]: DEBUG oslo_concurrency.lockutils [req-3a80859c-0036-4134-9ef8-659c7af9e938 req-85efaa58-0f99-4c30-8291-e2e5c268a53e service nova] Releasing lock "refresh_cache-58dded95-033a-46d7-b02e-5b2f2551234c" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.563315] env[70020]: DEBUG nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.564448] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78bf3d1-1540-40de-9caf-7b5d925f13ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.915517] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "58dded95-033a-46d7-b02e-5b2f2551234c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.916362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "58dded95-033a-46d7-b02e-5b2f2551234c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.918281] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "58dded95-033a-46d7-b02e-5b2f2551234c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.918281] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "58dded95-033a-46d7-b02e-5b2f2551234c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.918281] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "58dded95-033a-46d7-b02e-5b2f2551234c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.920414] env[70020]: INFO nova.compute.manager [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Terminating instance [ 992.061021] env[70020]: DEBUG nova.scheduler.client.report [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.429291] env[70020]: DEBUG nova.compute.manager [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 992.429616] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.431129] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08e8206-f5d1-4946-b2de-e75eb307a5b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.443622] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.443937] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa9aef19-6163-4817-9295-c4fed2153bf4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.454758] env[70020]: DEBUG oslo_vmware.api [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 992.454758] env[70020]: value = "task-3618686" [ 992.454758] env[70020]: _type = "Task" [ 992.454758] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.469715] env[70020]: DEBUG oslo_vmware.api [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.567265] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.568128] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.867s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.568429] env[70020]: DEBUG nova.objects.instance [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lazy-loading 'resources' on Instance uuid 2ccd34c8-b433-41be-b800-d06a0595bff9 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.581748] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.582122] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18140dc0-5732-41b5-bdea-5c399ccd2d35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.590428] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 992.590428] env[70020]: value = "task-3618687" [ 992.590428] env[70020]: _type = "Task" [ 992.590428] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.596295] env[70020]: INFO nova.scheduler.client.report [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleted allocations for instance edef9245-4048-4ea4-90cc-ebed54498d88 [ 992.605552] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 992.605959] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.607412] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa58861-e207-4379-9df8-1732ce634522 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.614266] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.614524] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdd78e26-e30a-46ad-9c78-e41c9c90d7d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.688226] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.688226] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.688226] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.688484] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7466d43b-28bf-4800-93d1-33cef11eb560 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.694639] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 
tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 992.694639] env[70020]: value = "task-3618689" [ 992.694639] env[70020]: _type = "Task" [ 992.694639] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.702193] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618689, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.964721] env[70020]: DEBUG oslo_vmware.api [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618686, 'name': PowerOffVM_Task, 'duration_secs': 0.204849} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.966168] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.966443] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.966798] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa89b052-8f7f-4805-8477-cd453f97f4d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.087066] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.087924] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.089769] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Deleting the datastore file [datastore2] 58dded95-033a-46d7-b02e-5b2f2551234c {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.090129] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3352aba5-7749-4435-8c91-86b6d0908092 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.099933] env[70020]: DEBUG oslo_vmware.api [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for the task: (returnval){ [ 993.099933] env[70020]: value = "task-3618691" [ 993.099933] env[70020]: _type = "Task" [ 993.099933] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.108401] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf71c047-d989-498e-be15-5729b23343ab tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "edef9245-4048-4ea4-90cc-ebed54498d88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.356s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.117973] env[70020]: DEBUG oslo_vmware.api [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618691, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.214388] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179137} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.214656] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.214839] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.215023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.519292] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f2dd76-43fe-48a5-b707-55c8a4418a3b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.527305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6ea505-c4a0-4874-bfb1-634ce518f2e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.562082] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3967d205-7181-4fdd-b112-cd7433f7cd40 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.571915] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba97baba-db22-41cb-847e-9cea87f52177 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.586642] env[70020]: DEBUG nova.compute.provider_tree [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.611577] env[70020]: DEBUG oslo_vmware.api [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Task: {'id': task-3618691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175549} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.612045] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.612356] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.612622] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.612837] env[70020]: INFO nova.compute.manager [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Took 1.18 seconds to destroy the instance on the hypervisor. [ 993.613093] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 993.613287] env[70020]: DEBUG nova.compute.manager [-] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 993.613383] env[70020]: DEBUG nova.network.neutron [-] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.790765] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.791046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.791269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.791454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.791622] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.795582] env[70020]: INFO nova.compute.manager [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Terminating instance [ 993.948901] env[70020]: DEBUG nova.compute.manager [req-f7a40a64-8510-49f6-8ba1-0a92c47b4782 req-87f28f46-c6cb-41e4-ad19-7a2ec77e6357 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Received event network-vif-deleted-71a34572-9310-4b13-b628-322b0a2dcf71 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.949099] 
env[70020]: INFO nova.compute.manager [req-f7a40a64-8510-49f6-8ba1-0a92c47b4782 req-87f28f46-c6cb-41e4-ad19-7a2ec77e6357 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Neutron deleted interface 71a34572-9310-4b13-b628-322b0a2dcf71; detaching it from the instance and deleting it from the info cache [ 993.949298] env[70020]: DEBUG nova.network.neutron [req-f7a40a64-8510-49f6-8ba1-0a92c47b4782 req-87f28f46-c6cb-41e4-ad19-7a2ec77e6357 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.111232] env[70020]: ERROR nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] [req-1d4cd5d6-1c2f-4b67-87bf-04798a913a5f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1d4cd5d6-1c2f-4b67-87bf-04798a913a5f"}]} [ 994.127896] env[70020]: DEBUG nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 994.146873] env[70020]: DEBUG nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 994.147103] env[70020]: DEBUG nova.compute.provider_tree [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.164126] env[70020]: DEBUG nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 994.191639] env[70020]: DEBUG nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 994.275017] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 994.275975] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.276190] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 994.276391] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.276537] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 994.276682] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 994.277192] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 994.277392] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 994.277573] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 994.277740] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 994.277915] env[70020]: DEBUG nova.virt.hardware [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 994.279678] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776ca6f0-6fec-4045-ae2a-5156319fe872 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.290874] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a20913-3c71-48ed-9f75-48c50b9260ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.301274] env[70020]: DEBUG nova.compute.manager [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.301399] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.310681] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef68d73-1203-4582-99b0-d472aa10f3f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.314292] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:be:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a805f6e6-6016-433a-b106-0e686f4bd6ef', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.321959] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 994.324770] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.325400] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6aad3904-9bb9-4a58-a175-6f5e06b82f06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.349633] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.350519] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4054f4e5-2333-42ba-89c8-56c6d327a388 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.354384] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.354384] env[70020]: value = "task-3618692" [ 994.354384] env[70020]: _type = "Task" [ 994.354384] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.358442] env[70020]: DEBUG oslo_vmware.api [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 994.358442] env[70020]: value = "task-3618693" [ 994.358442] env[70020]: _type = "Task" [ 994.358442] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.364639] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618692, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.373025] env[70020]: DEBUG oslo_vmware.api [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.423394] env[70020]: DEBUG nova.network.neutron [-] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.452612] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf32a9f3-c91e-413c-a0d1-634058dd0869 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.466736] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ad861a-15b2-42e3-a1ba-7bf7ed5451e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.500830] env[70020]: DEBUG nova.compute.manager [req-f7a40a64-8510-49f6-8ba1-0a92c47b4782 req-87f28f46-c6cb-41e4-ad19-7a2ec77e6357 service nova] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Detach interface failed, port_id=71a34572-9310-4b13-b628-322b0a2dcf71, reason: Instance 58dded95-033a-46d7-b02e-5b2f2551234c could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 994.619076] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3b15f8-35ff-46b7-8115-265806071b37 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.627115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7717a07c-da8a-4c5f-85a3-8542fc043a1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.660824] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c730001a-eb4c-49e5-b0ea-40d0da088938 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.669149] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af737ee9-2040-4a0a-ad9d-186f21e16db5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.682731] env[70020]: DEBUG nova.compute.provider_tree [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.875658] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618692, 'name': CreateVM_Task, 'duration_secs': 0.41717} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.875658] env[70020]: DEBUG oslo_vmware.api [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618693, 'name': PowerOffVM_Task, 'duration_secs': 0.21014} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.875658] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 994.875853] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.876163] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 994.877699] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.877699] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.877699] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 994.877699] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-444ec91c-6746-48c1-af2f-0ba31793823c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.879259] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96bb2807-e643-462d-b0e2-32d2cff09ab2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.883496] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 994.883496] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e229cb-d3f7-b325-a1a8-50a3ce2e0d80" [ 994.883496] env[70020]: _type = "Task" [ 994.883496] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.891120] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e229cb-d3f7-b325-a1a8-50a3ce2e0d80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.929026] env[70020]: INFO nova.compute.manager [-] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Took 1.31 seconds to deallocate network for instance. [ 994.939655] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 994.939802] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 994.943028] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleting the datastore file [datastore2] 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 994.943028] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3cf3a42-dc44-4048-9cb4-313631f94e8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.950026] env[70020]: DEBUG oslo_vmware.api [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 994.950026] env[70020]: value = "task-3618695" [ 994.950026] env[70020]: _type = "Task" [ 994.950026] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.959608] env[70020]: DEBUG oslo_vmware.api [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.220250] env[70020]: DEBUG nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 995.220522] env[70020]: DEBUG nova.compute.provider_tree [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 117 to 118 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 995.220701] env[70020]: DEBUG nova.compute.provider_tree [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 995.399231] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e229cb-d3f7-b325-a1a8-50a3ce2e0d80, 'name': SearchDatastore_Task, 'duration_secs': 0.00992} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.399577] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.399820] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 995.401071] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.402613] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.402830] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 995.403229] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccfa3107-c9be-4f39-a758-96fb9dc5664c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.417606] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 995.417812] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 995.418635] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aff974f-3e55-4869-a923-5442192d03f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.425538] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 995.425538] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c5b0e9-9325-7818-b96c-24a12770113b" [ 995.425538] env[70020]: _type = "Task" [ 995.425538] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.434270] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c5b0e9-9325-7818-b96c-24a12770113b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.435045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.464447] env[70020]: DEBUG oslo_vmware.api [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186762} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.465027] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.465301] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.466055] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.466472] env[70020]: INFO nova.compute.manager [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 995.466787] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.467051] env[70020]: DEBUG nova.compute.manager [-] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 995.467228] env[70020]: DEBUG nova.network.neutron [-] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.732248] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.159s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.733679] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.219s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.733937] env[70020]: DEBUG nova.objects.instance [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lazy-loading 'resources' on Instance uuid 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.779041] env[70020]: INFO nova.scheduler.client.report [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Deleted allocations for instance 2ccd34c8-b433-41be-b800-d06a0595bff9 [ 995.950043] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c5b0e9-9325-7818-b96c-24a12770113b, 'name': SearchDatastore_Task, 'duration_secs': 0.022572} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.950043] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a134eb7-bd7f-4617-98d8-d85f2d62f6ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.955368] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 995.955368] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5280562b-f26a-3e7d-e82e-e02a70c32e88" [ 995.955368] env[70020]: _type = "Task" [ 995.955368] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.963447] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5280562b-f26a-3e7d-e82e-e02a70c32e88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.078798] env[70020]: DEBUG nova.compute.manager [req-37b00cdb-07c5-4bb4-85c0-742d63529b53 req-8ab7b2db-49f4-4b71-836b-295e0c083fd3 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Received event network-vif-deleted-3a364f68-e0c0-48ae-ab70-f576cd06610e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 996.079038] env[70020]: INFO nova.compute.manager [req-37b00cdb-07c5-4bb4-85c0-742d63529b53 req-8ab7b2db-49f4-4b71-836b-295e0c083fd3 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Neutron deleted interface 3a364f68-e0c0-48ae-ab70-f576cd06610e; detaching it from the instance and deleting it from the info cache [ 996.079179] env[70020]: DEBUG nova.network.neutron [req-37b00cdb-07c5-4bb4-85c0-742d63529b53 req-8ab7b2db-49f4-4b71-836b-295e0c083fd3 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.239151] env[70020]: DEBUG nova.objects.instance [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lazy-loading 'numa_topology' on Instance uuid 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.287297] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6bd36a00-0167-46d5-b140-016c87ee80de tempest-ImagesOneServerNegativeTestJSON-165560685 tempest-ImagesOneServerNegativeTestJSON-165560685-project-member] Lock "2ccd34c8-b433-41be-b800-d06a0595bff9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.577s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.362525] env[70020]: DEBUG nova.network.neutron [-] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.467998] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5280562b-f26a-3e7d-e82e-e02a70c32e88, 'name': SearchDatastore_Task, 'duration_secs': 0.015628} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.469145] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.470208] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.470607] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98d47429-8132-4bbd-9e4c-56e6552ef208 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.481544] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 996.481544] env[70020]: value = "task-3618696" [ 996.481544] env[70020]: _type = "Task" [ 996.481544] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.490072] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618696, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.581606] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dbb86a4-374d-489e-b283-c0fc5ca3cee7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.590781] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf23767-1dc6-4808-a7f2-ae6959ee619a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.624424] env[70020]: DEBUG nova.compute.manager [req-37b00cdb-07c5-4bb4-85c0-742d63529b53 req-8ab7b2db-49f4-4b71-836b-295e0c083fd3 service nova] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Detach interface failed, port_id=3a364f68-e0c0-48ae-ab70-f576cd06610e, reason: Instance 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 996.744978] env[70020]: DEBUG nova.objects.base [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Object Instance<2198e7f8-5458-4b97-abb3-0a3c932cebc2> lazy-loaded attributes: resources,numa_topology {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 996.865844] env[70020]: INFO nova.compute.manager [-] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Took 1.40 seconds to deallocate network for instance. [ 996.996137] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618696, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477281} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.996403] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.996617] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.996863] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-431938b3-e83d-40dc-b391-2b74c1b44bf0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.006352] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 997.006352] env[70020]: value = "task-3618697" [ 997.006352] env[70020]: _type = "Task" [ 997.006352] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.017601] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.123102] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8609ee9a-e92d-47aa-9d0c-498aef9da3fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.131020] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7134b3f1-864f-485e-a1d8-95506e48277a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.161045] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8f5df4-3d03-4213-8036-3f50b7852dfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.172014] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8c225c-35d2-4dc0-8545-3e9f7a7b1d14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.186937] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.376768] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.513905] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06803} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.514233] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 997.514982] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eaf4523-8921-490c-80e6-a1753a6c9f72 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.538578] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 997.538716] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96adf155-d07c-4a9a-a6ea-9b9af76554dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.560337] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 997.560337] env[70020]: value = "task-3618698" [ 997.560337] env[70020]: _type = "Task" [ 997.560337] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.569461] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618698, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.718801] env[70020]: ERROR nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [req-4644c6f5-ba4a-4275-aae8-3e24c73edf92] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4644c6f5-ba4a-4275-aae8-3e24c73edf92"}]} [ 997.744792] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 997.761336] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 997.761930] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.776056] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 997.805219] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 997.851772] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.852617] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.070259] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.208203] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d560530-a3d4-4106-a285-e5d2526c512d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.221134] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f1a315-f6f5-46fd-a725-548d97dcbf02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.253958] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfd1bc5-3eff-40a9-b1fc-bc17af1b6fe3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.262573] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b9ef68-26d1-48e9-8902-cf2f3f7da9a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.280400] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.358325] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 998.443509] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.443786] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.570725] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618698, 'name': ReconfigVM_Task, 'duration_secs': 0.686365} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.571092] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b/40fa0339-c221-4841-9444-dc957a95cf3b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 998.571721] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85a03943-4231-4027-b9e9-095d04ca58f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.578064] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 998.578064] env[70020]: value = "task-3618699" [ 998.578064] env[70020]: _type = "Task" [ 998.578064] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.586409] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618699, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.806029] env[70020]: ERROR nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [req-64db4b94-a948-4031-a238-d013000c30b6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-64db4b94-a948-4031-a238-d013000c30b6"}]} [ 998.830634] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 998.850403] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 998.850403] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.866275] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 998.885266] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 
tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.900089] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 998.949127] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 999.090600] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618699, 'name': Rename_Task, 'duration_secs': 0.160074} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.090935] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.091196] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4314c5f3-3a3b-4533-b8d3-c65656cc5d2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.098982] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 999.098982] env[70020]: value = "task-3618700" [ 999.098982] env[70020]: _type = "Task" [ 999.098982] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.106580] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618700, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.238744] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb06ff6-e64d-4511-bdf7-c1826724f08c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.246405] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2590050e-e812-48d1-84bc-13682e85dd2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.277808] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496e2fe7-70a1-4243-9ff8-14432f968753 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.286586] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc86a9a5-5f04-4333-8d83-f3f9cfc13531 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.299612] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 999.466948] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.608801] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618700, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.838935] env[70020]: DEBUG nova.scheduler.client.report [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 999.838935] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 120 to 121 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 999.838935] env[70020]: DEBUG nova.compute.provider_tree [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1000.111564] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618700, 'name': PowerOnVM_Task, 'duration_secs': 0.634029} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.111564] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.111564] env[70020]: DEBUG nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.112575] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1052f157-52e4-46d1-a5e8-c2d4c9f426b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.344483] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.611s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.347396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.145s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.348831] env[70020]: INFO nova.compute.claims [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.624327] env[70020]: INFO nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] bringing vm to original state: 'stopped' [ 1000.855154] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a7297264-8a92-45c4-8c11-4e78539885a6 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 46.801s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.856435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 24.497s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.856730] env[70020]: INFO nova.compute.manager [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d 
tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Unshelving [ 1001.631534] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "40fa0339-c221-4841-9444-dc957a95cf3b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.631796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.631969] env[70020]: DEBUG nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.635408] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48aa7285-2912-4b51-835b-87a109d514e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.642482] env[70020]: DEBUG nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1001.724557] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f349ac36-bf14-443a-be7d-437d85b165d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.735579] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bf5790-2e5f-4fcb-bde0-aeed23e1e01e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.770040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c77bb01-983f-40b3-82ac-5979904e9c62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.777247] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f435edcb-f1d8-43f4-8c6d-1ec06381576c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.791223] env[70020]: DEBUG nova.compute.provider_tree [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.885727] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.147018] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.147265] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a7c2819-9528-4b3b-aa0a-b6421319c80b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.155394] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1002.155394] env[70020]: value = "task-3618701" [ 1002.155394] env[70020]: _type = "Task" [ 1002.155394] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.163628] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618701, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.295232] env[70020]: DEBUG nova.scheduler.client.report [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.665930] env[70020]: DEBUG oslo_vmware.api [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618701, 'name': PowerOffVM_Task, 'duration_secs': 0.203835} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.666410] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.666765] env[70020]: DEBUG nova.compute.manager [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.667678] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e18bfc-82ee-435b-829a-7cbc60d407bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.715417] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "04de1a07-cf38-41e0-be96-237bbe1ead83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.715417] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.801912] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.802497] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1002.805228] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.843s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.805467] env[70020]: DEBUG nova.objects.instance [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lazy-loading 'resources' on Instance uuid f56e88f6-3a25-44d9-bdb1-cc4291169c9c {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.182280] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.550s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.217162] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.308314] env[70020]: DEBUG nova.compute.utils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.310071] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1003.310300] env[70020]: DEBUG nova.network.neutron [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1003.362742] env[70020]: DEBUG nova.policy [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b67375d5e85b4ba99d47120945bbf0f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52cd193f3ca7403a986d72f072590f4f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1003.562798] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "40fa0339-c221-4841-9444-dc957a95cf3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.563046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.563249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "40fa0339-c221-4841-9444-dc957a95cf3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.563434] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.563599] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.566465] env[70020]: INFO nova.compute.manager [None 
req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Terminating instance [ 1003.643901] env[70020]: DEBUG nova.network.neutron [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Successfully created port: 06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.663144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9a79c1-4ecd-4aae-aa40-82a6d0974920 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.670922] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb4008e-8b83-4b93-b8aa-8854c2ea035c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.704761] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55358593-b699-4067-9420-6ae5a9f8a13e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.710352] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.713968] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98b08c6-f3a2-495b-8fa4-9df6280fab65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.733622] env[70020]: DEBUG nova.compute.provider_tree [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.757096] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.816161] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1004.075445] env[70020]: DEBUG nova.compute.manager [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1004.075643] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.076599] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29fd4bc-21fb-402f-b384-2d9fd6fff2df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.085533] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.085760] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01be9bdf-9c68-4cff-a005-288ab6dfb5b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.144829] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.145055] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.145239] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore1] 40fa0339-c221-4841-9444-dc957a95cf3b {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.145496] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0690a45-b3c1-40f9-8ac2-b56cd8531ff3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.151306] env[70020]: DEBUG oslo_vmware.api [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1004.151306] env[70020]: value = "task-3618703" [ 1004.151306] env[70020]: _type = "Task" [ 1004.151306] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.159057] env[70020]: DEBUG oslo_vmware.api [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618703, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.239419] env[70020]: DEBUG nova.scheduler.client.report [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.661638] env[70020]: DEBUG oslo_vmware.api [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179301} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.662995] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.662995] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.662995] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.662995] env[70020]: INFO nova.compute.manager [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1004.662995] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.663297] env[70020]: DEBUG nova.compute.manager [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1004.663297] env[70020]: DEBUG nova.network.neutron [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.744476] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.747029] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.944s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.748295] env[70020]: INFO nova.compute.claims [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.769999] env[70020]: INFO nova.scheduler.client.report [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted allocations for instance f56e88f6-3a25-44d9-bdb1-cc4291169c9c [ 1004.826030] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1004.856598] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.856598] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.856905] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.856905] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.857201] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.857201] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.857299] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.857459] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.857618] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Got 1 
possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.857765] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.857935] env[70020]: DEBUG nova.virt.hardware [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.858879] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b701aeb-0625-48ab-8998-4fb46d8a5c50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.868161] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a2c549-f934-43ee-aa9a-c2a64688b8a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.967987] env[70020]: DEBUG nova.compute.manager [req-3bd8aa36-1768-420d-93b0-4dc928022acb req-4cdeaca2-dab5-4a37-b7db-5d60b031020b service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Received event network-vif-deleted-a805f6e6-6016-433a-b106-0e686f4bd6ef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1004.968127] env[70020]: INFO nova.compute.manager [req-3bd8aa36-1768-420d-93b0-4dc928022acb req-4cdeaca2-dab5-4a37-b7db-5d60b031020b service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Neutron deleted interface a805f6e6-6016-433a-b106-0e686f4bd6ef; detaching it from the instance and deleting it from the info cache [ 1004.968695] env[70020]: DEBUG nova.network.neutron [req-3bd8aa36-1768-420d-93b0-4dc928022acb req-4cdeaca2-dab5-4a37-b7db-5d60b031020b service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.146238] env[70020]: DEBUG nova.network.neutron [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Successfully updated port: 06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.280007] env[70020]: DEBUG oslo_concurrency.lockutils [None req-794eefb1-95ee-4305-b893-f9f2b3ba52cc tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "f56e88f6-3a25-44d9-bdb1-cc4291169c9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.109s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.447262] env[70020]: DEBUG nova.network.neutron [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.471372] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-c47fbc1a-b684-4372-99e8-d44cfef23276 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.481878] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc27abfb-2667-49cd-8bc0-33b78edd097e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.514209] env[70020]: DEBUG nova.compute.manager [req-3bd8aa36-1768-420d-93b0-4dc928022acb req-4cdeaca2-dab5-4a37-b7db-5d60b031020b service nova] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Detach interface failed, port_id=a805f6e6-6016-433a-b106-0e686f4bd6ef, reason: Instance 40fa0339-c221-4841-9444-dc957a95cf3b could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1005.651098] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "refresh_cache-42d20396-883d-4141-a226-61f476057cbe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.651279] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "refresh_cache-42d20396-883d-4141-a226-61f476057cbe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.651444] env[70020]: DEBUG nova.network.neutron [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.949073] env[70020]: INFO nova.compute.manager [-] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Took 1.29 seconds to deallocate network for instance. 
[ 1006.055183] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442a0b87-3f19-4b31-b0da-eb0dfb8f590a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.062525] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570d411e-21c7-4964-9393-5c9962c21ce7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.092765] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51833466-8054-4487-ad3f-780b4584a0e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.100037] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290a7beb-f571-4d41-aa14-b0bc0b3ded56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.112893] env[70020]: DEBUG nova.compute.provider_tree [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.190610] env[70020]: DEBUG nova.network.neutron [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.333810] env[70020]: DEBUG nova.network.neutron [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Updating instance_info_cache with network_info: [{"id": "06d51c44-f553-4e0b-9d14-0c9f56972fee", "address": "fa:16:3e:b3:6c:13", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d51c44-f5", "ovs_interfaceid": "06d51c44-f553-4e0b-9d14-0c9f56972fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.455703] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 
tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.617093] env[70020]: DEBUG nova.scheduler.client.report [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.836970] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "refresh_cache-42d20396-883d-4141-a226-61f476057cbe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.837408] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Instance network_info: |[{"id": "06d51c44-f553-4e0b-9d14-0c9f56972fee", "address": "fa:16:3e:b3:6c:13", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d51c44-f5", "ovs_interfaceid": "06d51c44-f553-4e0b-9d14-0c9f56972fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1006.837868] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:6c:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06d51c44-f553-4e0b-9d14-0c9f56972fee', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
1006.845419] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1006.845649] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d20396-883d-4141-a226-61f476057cbe] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.845866] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ab56fb3-6f1f-429c-b022-3a3d60e6ab9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.868037] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.868037] env[70020]: value = "task-3618704" [ 1006.868037] env[70020]: _type = "Task" [ 1006.868037] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.873876] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618704, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.994262] env[70020]: DEBUG nova.compute.manager [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Received event network-vif-plugged-06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1006.994503] env[70020]: DEBUG oslo_concurrency.lockutils [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] Acquiring lock "42d20396-883d-4141-a226-61f476057cbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.994710] env[70020]: DEBUG oslo_concurrency.lockutils [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] Lock "42d20396-883d-4141-a226-61f476057cbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.994875] env[70020]: DEBUG oslo_concurrency.lockutils [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] Lock "42d20396-883d-4141-a226-61f476057cbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.995052] env[70020]: DEBUG nova.compute.manager [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] No waiting events found dispatching network-vif-plugged-06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1006.995221] env[70020]: WARNING nova.compute.manager [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 
42d20396-883d-4141-a226-61f476057cbe] Received unexpected event network-vif-plugged-06d51c44-f553-4e0b-9d14-0c9f56972fee for instance with vm_state building and task_state spawning. [ 1006.995461] env[70020]: DEBUG nova.compute.manager [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Received event network-changed-06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1006.995623] env[70020]: DEBUG nova.compute.manager [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Refreshing instance network info cache due to event network-changed-06d51c44-f553-4e0b-9d14-0c9f56972fee. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1006.995830] env[70020]: DEBUG oslo_concurrency.lockutils [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] Acquiring lock "refresh_cache-42d20396-883d-4141-a226-61f476057cbe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.995966] env[70020]: DEBUG oslo_concurrency.lockutils [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] Acquired lock "refresh_cache-42d20396-883d-4141-a226-61f476057cbe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.996131] env[70020]: DEBUG nova.network.neutron [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Refreshing network info cache for port 06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.121420] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.121872] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1007.124788] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.172s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.125710] env[70020]: DEBUG nova.objects.instance [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1007.377116] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618704, 'name': CreateVM_Task, 'duration_secs': 0.350735} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.377385] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d20396-883d-4141-a226-61f476057cbe] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.377940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.378135] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.378443] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1007.378685] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4c0b2e0-9dfe-4f6a-ae6d-786e40789a55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.382811] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1007.382811] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52679cb4-7b31-a824-57de-f59df00216a3" [ 1007.382811] env[70020]: _type = "Task" [ 1007.382811] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.390024] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52679cb4-7b31-a824-57de-f59df00216a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.629514] env[70020]: DEBUG nova.compute.utils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.630865] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1007.631042] env[70020]: DEBUG nova.network.neutron [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1007.687654] env[70020]: DEBUG nova.policy [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '471cc5386a83467b93af8417dfbe09c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7252df4458bb4a1283a419877e101bf5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1007.728257] env[70020]: DEBUG nova.network.neutron [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Updated VIF entry in instance network info cache for port 06d51c44-f553-4e0b-9d14-0c9f56972fee. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1007.728602] env[70020]: DEBUG nova.network.neutron [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Updating instance_info_cache with network_info: [{"id": "06d51c44-f553-4e0b-9d14-0c9f56972fee", "address": "fa:16:3e:b3:6c:13", "network": {"id": "8a7c653b-f915-404f-a4ab-bea48b7e6574", "bridge": "br-int", "label": "tempest-ImagesTestJSON-377932844-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52cd193f3ca7403a986d72f072590f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06d51c44-f5", "ovs_interfaceid": "06d51c44-f553-4e0b-9d14-0c9f56972fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.893532] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52679cb4-7b31-a824-57de-f59df00216a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009661} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.894158] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.894158] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.894333] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.894593] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.894663] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.894907] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-308831d2-4a00-49d2-8122-5e7d87808c8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.903134] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.903455] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.904174] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-006417ad-ee3e-4b9c-b9d0-a4d65fd66b0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.909227] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1007.909227] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52842149-d22b-5b94-6dba-62c6f66de1b6" [ 1007.909227] env[70020]: _type = "Task" [ 1007.909227] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.916872] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52842149-d22b-5b94-6dba-62c6f66de1b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.974071] env[70020]: DEBUG nova.network.neutron [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Successfully created port: 92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.137152] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1008.140775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2d102ed8-f24e-4f87-8eb5-45b90c562dc1 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.141687] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.223s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.141902] env[70020]: DEBUG nova.objects.instance [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lazy-loading 'resources' on Instance uuid ef85421b-b679-4f38-b052-5695baa2e405 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.234290] env[70020]: DEBUG oslo_concurrency.lockutils [req-ee8f3bbe-b008-4568-a7a3-742bf0d0d5ad req-4eaa5bc0-acff-4715-ba1e-6a977c840153 service nova] Releasing lock "refresh_cache-42d20396-883d-4141-a226-61f476057cbe" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.419616] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52842149-d22b-5b94-6dba-62c6f66de1b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.420456] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcfec8d4-3e48-4291-a181-daf2b6ba8419 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.425330] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1008.425330] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525732b6-0c32-060d-45a2-7ebdcb9cd0c6" [ 1008.425330] env[70020]: _type = "Task" [ 1008.425330] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.432609] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525732b6-0c32-060d-45a2-7ebdcb9cd0c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.938233] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525732b6-0c32-060d-45a2-7ebdcb9cd0c6, 'name': SearchDatastore_Task, 'duration_secs': 0.010266} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.940766] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.941379] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 42d20396-883d-4141-a226-61f476057cbe/42d20396-883d-4141-a226-61f476057cbe.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.941652] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42fc99cc-8d36-477e-b333-d463d9fb0c1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.948972] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1008.948972] env[70020]: value = "task-3618705" [ 1008.948972] env[70020]: _type = "Task" [ 1008.948972] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.960376] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618705, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.965690] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c823a5be-bfd8-41b9-9d67-7097a4fc6681 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.972323] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c8b476-3899-4ba8-a408-613e7fb7c1d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.002413] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52714829-f486-44bd-b330-a084ce5f20f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.010191] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b46b3c-d740-4c54-9299-1a32a61f9851 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.023238] env[70020]: DEBUG nova.compute.provider_tree [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.079316] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "d45966fe-98ff-4466-8e7e-90550034742f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.113318] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "d45966fe-98ff-4466-8e7e-90550034742f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.113318] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "d45966fe-98ff-4466-8e7e-90550034742f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.113318] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "d45966fe-98ff-4466-8e7e-90550034742f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.113318] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 
tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "d45966fe-98ff-4466-8e7e-90550034742f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.113318] env[70020]: INFO nova.compute.manager [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Terminating instance [ 1009.154023] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1009.201020] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.201020] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.201020] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.201020] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 
tempest-AttachInterfacesUnderV243Test-760224301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.205019] env[70020]: DEBUG nova.virt.hardware [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.205019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77da3f91-bfec-4953-8b59-0e0ccfa32d2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.218660] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe531f71-9212-445a-b59e-41955297fbf0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.404519] env[70020]: DEBUG nova.compute.manager [req-5e741dbb-d598-4fb9-8b92-207fb58880b1 req-ec1ef5ae-22bb-43b1-9d3e-db62c7a8ab89 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received event network-vif-plugged-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.404751] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e741dbb-d598-4fb9-8b92-207fb58880b1 req-ec1ef5ae-22bb-43b1-9d3e-db62c7a8ab89 service nova] Acquiring lock "3dedfa48-0839-462e-8c32-ba5252f07ac0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.404963] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e741dbb-d598-4fb9-8b92-207fb58880b1 req-ec1ef5ae-22bb-43b1-9d3e-db62c7a8ab89 service nova] Lock 
"3dedfa48-0839-462e-8c32-ba5252f07ac0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.405571] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e741dbb-d598-4fb9-8b92-207fb58880b1 req-ec1ef5ae-22bb-43b1-9d3e-db62c7a8ab89 service nova] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.405768] env[70020]: DEBUG nova.compute.manager [req-5e741dbb-d598-4fb9-8b92-207fb58880b1 req-ec1ef5ae-22bb-43b1-9d3e-db62c7a8ab89 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] No waiting events found dispatching network-vif-plugged-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1009.405934] env[70020]: WARNING nova.compute.manager [req-5e741dbb-d598-4fb9-8b92-207fb58880b1 req-ec1ef5ae-22bb-43b1-9d3e-db62c7a8ab89 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received unexpected event network-vif-plugged-92b2e42f-1b28-4c86-a59d-e06f2adc11da for instance with vm_state building and task_state spawning. [ 1009.459320] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618705, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.491358] env[70020]: DEBUG nova.network.neutron [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Successfully updated port: 92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1009.527414] env[70020]: DEBUG nova.scheduler.client.report [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.588182] env[70020]: DEBUG nova.compute.manager [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.588456] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.589336] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca051ef7-2676-4879-9129-86831175ce19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.596965] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.597224] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a4c3eb3-b8d1-43a4-9e71-3585e292c0b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.602517] env[70020]: DEBUG oslo_vmware.api [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1009.602517] env[70020]: value = "task-3618706" [ 1009.602517] env[70020]: _type = "Task" [ 1009.602517] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.610285] env[70020]: DEBUG oslo_vmware.api [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.960211] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632391} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.960532] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 42d20396-883d-4141-a226-61f476057cbe/42d20396-883d-4141-a226-61f476057cbe.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.960799] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1009.961076] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5b80a77-08cf-422b-b67d-30eb78d6a78a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.968047] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1009.968047] env[70020]: value = "task-3618707" [ 1009.968047] env[70020]: _type = "Task" [ 1009.968047] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.976269] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618707, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.994979] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.995140] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.995515] env[70020]: DEBUG nova.network.neutron [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.031840] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.890s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.034391] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.102s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.034629] env[70020]: DEBUG nova.objects.instance [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lazy-loading 'resources' on Instance uuid 563512c2-b80f-4f14-add5-d48e2b7a0ee9 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.058989] env[70020]: INFO nova.scheduler.client.report [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Deleted allocations for instance ef85421b-b679-4f38-b052-5695baa2e405 [ 1010.114078] env[70020]: DEBUG oslo_vmware.api [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618706, 'name': PowerOffVM_Task, 'duration_secs': 0.248034} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.114381] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.114555] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.114856] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f83760b4-a89b-4972-ada4-f3f6a82ac7d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.182488] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.182882] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.183183] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleting the datastore file [datastore1] d45966fe-98ff-4466-8e7e-90550034742f {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.183577] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81feeb17-b1ee-4c7a-9392-185c19496e68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.190964] env[70020]: DEBUG oslo_vmware.api [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1010.190964] env[70020]: value = "task-3618709" [ 1010.190964] env[70020]: _type = "Task" [ 1010.190964] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.198883] env[70020]: DEBUG oslo_vmware.api [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618709, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.478260] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082486} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.478515] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.479292] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9d3a83-b121-40f4-abc8-18a1336c8a23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.501868] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 42d20396-883d-4141-a226-61f476057cbe/42d20396-883d-4141-a226-61f476057cbe.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.503830] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-318622de-3fb7-4d6c-b46a-87b360abc8d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.521948] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1010.521948] env[70020]: value = "task-3618710" [ 1010.521948] env[70020]: _type = "Task" [ 1010.521948] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.529214] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618710, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.549210] env[70020]: DEBUG nova.network.neutron [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1010.569470] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d008617a-84d4-40a0-9126-35436a16c160 tempest-ListImageFiltersTestJSON-2117804784 tempest-ListImageFiltersTestJSON-2117804784-project-member] Lock "ef85421b-b679-4f38-b052-5695baa2e405" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.258s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.682469] env[70020]: DEBUG nova.network.neutron [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.702399] env[70020]: DEBUG oslo_vmware.api [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157229} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.702541] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.703352] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.703460] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.703645] env[70020]: INFO nova.compute.manager [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1010.703912] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.704558] env[70020]: DEBUG nova.compute.manager [-] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1010.704558] env[70020]: DEBUG nova.network.neutron [-] [instance: d45966fe-98ff-4466-8e7e-90550034742f] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1010.853263] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6336b2-a377-4318-91a8-c3c9b6a240a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.862387] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a10378d-8140-41ed-b4da-cffad2430dff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.892052] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c936382-6238-4fdb-82da-24e5717f2360 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.899378] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6698a85d-7ba8-46a4-b07a-537764de8723 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.913786] env[70020]: DEBUG nova.compute.provider_tree [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.031347] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618710, 'name': ReconfigVM_Task, 'duration_secs': 0.27099} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.031649] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 42d20396-883d-4141-a226-61f476057cbe/42d20396-883d-4141-a226-61f476057cbe.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.032292] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d90a98ab-eef5-4966-837c-1e5daa13d2c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.040529] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1011.040529] env[70020]: value = "task-3618711" [ 1011.040529] env[70020]: _type = "Task" [ 1011.040529] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.053843] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618711, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.186033] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.186033] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Instance network_info: |[{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1011.186033] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:cc:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e272539-d425-489f-9a63-aba692e88933', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92b2e42f-1b28-4c86-a59d-e06f2adc11da', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.193330] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Creating folder: Project (7252df4458bb4a1283a419877e101bf5). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1011.193884] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b041fff-6e4f-4146-b8b1-ad60ca4f1fa6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.205132] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Created folder: Project (7252df4458bb4a1283a419877e101bf5) in parent group-v721521. [ 1011.205386] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Creating folder: Instances. Parent ref: group-v721760. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1011.205643] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-783db28c-fd18-4cc5-8098-81987b148803 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.216718] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Created folder: Instances in parent group-v721760. [ 1011.216941] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1011.217132] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1011.217329] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b57836af-a926-4501-b3fb-2a453158a0cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.236909] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.236909] env[70020]: value = "task-3618714" [ 1011.236909] env[70020]: _type = "Task" [ 1011.236909] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.245629] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618714, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.418376] env[70020]: DEBUG nova.scheduler.client.report [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.491500] env[70020]: DEBUG nova.compute.manager [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.491731] env[70020]: DEBUG nova.compute.manager [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing instance network info cache due to event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1011.492103] env[70020]: DEBUG oslo_concurrency.lockutils [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.492347] env[70020]: DEBUG oslo_concurrency.lockutils [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.492454] env[70020]: DEBUG nova.network.neutron [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.535421] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.535645] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.550340] env[70020]: DEBUG 
oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618711, 'name': Rename_Task, 'duration_secs': 0.143789} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.550601] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.550842] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68f3f7e6-37c2-4268-9ba0-6f7be71f7a74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.556900] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1011.556900] env[70020]: value = "task-3618715" [ 1011.556900] env[70020]: _type = "Task" [ 1011.556900] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.564978] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.666843] env[70020]: DEBUG nova.network.neutron [-] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.747643] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618714, 'name': CreateVM_Task, 'duration_secs': 0.422485} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.747893] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.748798] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.749076] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.749537] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1011.749850] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9278decc-48fa-4be5-926c-3f9b6ed17936 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.756147] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1011.756147] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524f9853-3928-9715-3b76-45bbba0641ce" [ 1011.756147] env[70020]: _type = "Task" [ 1011.756147] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.767053] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524f9853-3928-9715-3b76-45bbba0641ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.923899] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.889s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.926576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.670s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.926801] env[70020]: DEBUG nova.objects.instance [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lazy-loading 'resources' on Instance uuid 4b5750d4-98ec-4c70-b214-fad97060b606 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.953915] env[70020]: INFO nova.scheduler.client.report [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Deleted allocations for instance 563512c2-b80f-4f14-add5-d48e2b7a0ee9 [ 1012.040179] env[70020]: DEBUG nova.compute.utils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1012.066847] env[70020]: DEBUG oslo_vmware.api [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618715, 'name': PowerOnVM_Task, 'duration_secs': 0.470264} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.067117] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.067724] env[70020]: INFO nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Took 7.24 seconds to spawn the instance on the hypervisor. 
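[editor's note] The entries above (Rename_Task, PowerOnVM_Task, the repeated "progress is N%" polls, then "completed successfully") follow oslo.vmware's invoke-and-wait pattern. The sketch below is illustrative only and is not code from this log or from Nova itself; the host, credentials, poll interval and the vm_ref argument are placeholder assumptions.

# Minimal sketch of the invoke-and-wait pattern, assuming a reachable vCenter
# and an already-resolved VM managed object reference (vm_ref).
from oslo_vmware import api as vmware_api

def power_on_vm(session, vm_ref):
    # invoke_api() issues the SOAP call that appears in the log as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task (the "Task: {'id': task-..., 'name':
    # PowerOnVM_Task} progress is N%" entries) and returns the task info
    # once vCenter reports the task completed successfully.
    return session.wait_for_task(task_ref)

# Placeholder session setup; task_poll_interval controls how often the
# "progress is N%" poll lines are emitted.
# session = vmware_api.VMwareAPISession(
#     'vc.example.test', 'user', 'password',
#     api_retry_count=10, task_poll_interval=0.5)
[end of editor's note]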
[ 1012.067724] env[70020]: DEBUG nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.068316] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aca32d5-cc99-4cab-95b3-96c238560da7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.169166] env[70020]: INFO nova.compute.manager [-] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Took 1.46 seconds to deallocate network for instance. [ 1012.200774] env[70020]: DEBUG nova.network.neutron [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updated VIF entry in instance network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1012.201160] env[70020]: DEBUG nova.network.neutron [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.266678] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524f9853-3928-9715-3b76-45bbba0641ce, 'name': SearchDatastore_Task, 'duration_secs': 0.00924} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.266983] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.267249] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.267526] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.267738] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.267862] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.268181] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55d25a22-bd7f-4621-9039-f707096c6c10 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.276265] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.276503] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.277437] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0a47402-b7f7-4b53-96f6-d35c6de4f8b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.282449] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1012.282449] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ad8983-58fa-41ab-b597-c1738d65925e" [ 1012.282449] env[70020]: _type = "Task" [ 1012.282449] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.290282] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ad8983-58fa-41ab-b597-c1738d65925e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.463293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d07f97-f941-4c9f-8cc3-23a87e39b759 tempest-ServerExternalEventsTest-408165301 tempest-ServerExternalEventsTest-408165301-project-member] Lock "563512c2-b80f-4f14-add5-d48e2b7a0ee9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.155s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.543968] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.587538] env[70020]: INFO nova.compute.manager [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Took 37.41 seconds to build instance. 
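[editor's note] The lockutils entries around here ('Acquiring lock ... by ...', 'Lock ... acquired ... :: waited Xs', '"released" ... :: held Ys', and the 'Acquiring/Acquired/Releasing lock "refresh_cache-..."' lines) come from oslo.concurrency's named-lock helpers. The snippet below is a minimal sketch of that usage, not Nova code; the lock name and function body are placeholders.

# Minimal sketch, assuming oslo.concurrency is installed.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs only while the named lock is held; the synchronized wrapper logs
    # the acquire (with time waited) and the release (with time held) at
    # DEBUG, which is what produces the "waited N.NNNs" / "held N.NNNs" lines.
    pass

# The same named lock can also be taken explicitly as a context manager,
# which corresponds to the plain Acquiring/Acquired/Releasing entries.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass
[end of editor's note]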
[ 1012.676554] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.705365] env[70020]: DEBUG oslo_concurrency.lockutils [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.705636] env[70020]: DEBUG nova.compute.manager [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Received event network-vif-deleted-998637c4-6d93-4002-8b56-ee2560a41b7c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1012.705826] env[70020]: INFO nova.compute.manager [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Neutron deleted interface 998637c4-6d93-4002-8b56-ee2560a41b7c; detaching it from the instance and deleting it from the info cache [ 1012.705999] env[70020]: DEBUG nova.network.neutron [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.716305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef59bd7d-88b0-4af1-8aa4-a5b2043a399f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.724240] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd90fe4-9379-4b6b-9fa2-76a540437468 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.755710] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be560ee-a1f6-4620-b9c9-c4af620e7bda {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.762856] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2399dd6-3f5c-4ca2-9a3a-db6b179d7610 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.776618] env[70020]: DEBUG nova.compute.provider_tree [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.791292] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ad8983-58fa-41ab-b597-c1738d65925e, 'name': 
SearchDatastore_Task, 'duration_secs': 0.007738} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.792597] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52420813-d90a-43ac-9a7f-038644243065 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.798147] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1012.798147] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5218bae5-afc7-32fe-e36c-7c6067e4919d" [ 1012.798147] env[70020]: _type = "Task" [ 1012.798147] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.805954] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5218bae5-afc7-32fe-e36c-7c6067e4919d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.869475] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "1ddd5a29-075b-482a-a6e9-4c7345673a00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.869705] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "1ddd5a29-075b-482a-a6e9-4c7345673a00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.088815] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9baf59d0-3ba2-42b1-a4e7-f10a5f016d33 tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "42d20396-883d-4141-a226-61f476057cbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.930s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.209143] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e670c67b-c715-48ac-b5a1-6d59be137494 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.219125] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc57d9f-1b1c-4e40-9e91-5747e38f1737 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.252417] env[70020]: DEBUG nova.compute.manager [req-41d66fd9-a078-4802-8b84-1aaf1da21a9f req-c9341213-85af-4835-bd2d-99b835cf5d69 service nova] [instance: 
d45966fe-98ff-4466-8e7e-90550034742f] Detach interface failed, port_id=998637c4-6d93-4002-8b56-ee2560a41b7c, reason: Instance d45966fe-98ff-4466-8e7e-90550034742f could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1013.279871] env[70020]: DEBUG nova.scheduler.client.report [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.309556] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5218bae5-afc7-32fe-e36c-7c6067e4919d, 'name': SearchDatastore_Task, 'duration_secs': 0.010433} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.309907] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.310872] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3dedfa48-0839-462e-8c32-ba5252f07ac0/3dedfa48-0839-462e-8c32-ba5252f07ac0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1013.310872] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-956cba31-612b-4d15-8c81-eac9014d90ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.317185] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1013.317185] env[70020]: value = "task-3618716" [ 1013.317185] env[70020]: _type = "Task" [ 1013.317185] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.325605] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618716, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.371537] env[70020]: DEBUG nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.599445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.599445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.599756] env[70020]: INFO nova.compute.manager [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Attaching volume 53da2c84-5ce4-4b98-93c7-9fe8956ff162 to /dev/sdb [ 1013.642128] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca131af-9039-4bab-bc7a-fc4b19fcb5d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.649923] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a61861c-accf-4a2a-8a0b-bbeccad480b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.664909] env[70020]: DEBUG nova.virt.block_device [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating existing volume attachment record: 55ada206-7ee2-40b4-8138-24a9c96a5093 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1013.787508] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.790018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 26.226s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.790250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.790680] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1013.790787] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.999s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.792483] env[70020]: INFO nova.compute.claims [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.797050] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c6277d-639c-4cb4-adf0-b3e2dbebbfd3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.809618] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df0da0b-25d4-440f-92b2-7840ee24161d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.820932] env[70020]: INFO nova.scheduler.client.report [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Deleted allocations for instance 4b5750d4-98ec-4c70-b214-fad97060b606 [ 1013.839303] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c31af58-466e-4467-b86b-1f7d15103885 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.848660] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499935} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.850373] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3dedfa48-0839-462e-8c32-ba5252f07ac0/3dedfa48-0839-462e-8c32-ba5252f07ac0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.850604] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.850883] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36d3bef2-ed19-4305-a741-81a6a9fc83ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.853478] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65026dc4-488a-48de-a653-453c807437c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.891994] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178985MB free_disk=75GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1013.892191] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.897280] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1013.897280] env[70020]: value = "task-3618718" [ 1013.897280] env[70020]: _type = "Task" [ 1013.897280] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.906459] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618718, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.907854] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.922387] env[70020]: DEBUG nova.compute.manager [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.923363] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a21e057-97ab-4dc9-a6a9-d6772c7c70e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.343562] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4892a39f-3994-4751-97a9-f2e8e1f7d6fd tempest-MigrationsAdminTest-81604366 tempest-MigrationsAdminTest-81604366-project-member] Lock "4b5750d4-98ec-4c70-b214-fad97060b606" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.520s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.408899] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097905} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.409179] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.409941] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4466f4-373e-40e5-81ce-1ea5ba6eaad6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.431950] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 3dedfa48-0839-462e-8c32-ba5252f07ac0/3dedfa48-0839-462e-8c32-ba5252f07ac0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.433284] env[70020]: INFO nova.compute.manager [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] instance snapshotting [ 1014.435271] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea38fe06-d513-463d-b0c1-e04e7c262214 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.450338] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4128123-5b6e-4846-97a6-688ffd47d940 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.471616] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429a02f2-a956-45bf-9155-d78a26476c14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.475156] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1014.475156] env[70020]: value = "task-3618719" [ 1014.475156] env[70020]: _type = "Task" [ 1014.475156] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.489374] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618719, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.986201] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1014.986201] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618719, 'name': ReconfigVM_Task, 'duration_secs': 0.322878} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.986496] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c24a5d3e-2112-4d93-a0a2-c5f4a97914ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.988184] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 3dedfa48-0839-462e-8c32-ba5252f07ac0/3dedfa48-0839-462e-8c32-ba5252f07ac0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.990941] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c58b9d0-60dc-434a-b65d-6f4671e45b0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.998675] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1014.998675] env[70020]: value = "task-3618721" [ 1014.998675] env[70020]: _type = "Task" [ 1014.998675] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.998844] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1014.998844] env[70020]: value = "task-3618720" [ 1014.998844] env[70020]: _type = "Task" [ 1014.998844] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.011870] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618721, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.015129] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618720, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.101241] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bd6416-5dc2-488e-8be6-f8585c001f8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.109170] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5a5bea-1c0b-49f3-aebf-175f586c98ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.142217] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cd598b-0f26-458b-987d-082a90816508 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.149660] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f329bb8-3312-4e8e-bebd-6d483eeef2a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.162924] env[70020]: DEBUG nova.compute.provider_tree [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.516781] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618720, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.518568] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618721, 'name': Rename_Task, 'duration_secs': 0.139812} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.518826] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.519060] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-303100a2-9d6b-44cf-bd53-137254303e84 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.525085] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1015.525085] env[70020]: value = "task-3618722" [ 1015.525085] env[70020]: _type = "Task" [ 1015.525085] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.532393] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618722, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.666658] env[70020]: DEBUG nova.scheduler.client.report [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.019614] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618720, 'name': CreateSnapshot_Task, 'duration_secs': 0.876873} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.021087] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1016.021087] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7409e2f-d5e1-4f43-8ccd-f7c21754473d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.041124] env[70020]: DEBUG oslo_vmware.api [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618722, 'name': PowerOnVM_Task, 'duration_secs': 0.496236} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.041429] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.041702] env[70020]: INFO nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Took 6.89 seconds to spawn the instance on the hypervisor. 
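Two patterns dominate the surrounding entries. The repeated "Waiting for the task ... progress is N%" / "completed successfully" pairs (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, CreateSnapshot_Task, CloneVM_Task) are oslo.vmware's task-polling loop: the driver invokes a *_Task method on a vCenter managed object and then blocks in wait_for_task, which polls the task until it succeeds or raises. A minimal sketch of that loop follows; the host, credentials, and "first VM" lookup are placeholders for illustration, not values taken from this run:

```python
# Hedged sketch of the invoke/wait pattern behind the *_Task entries above.
# Host, credentials, and the managed-object lookup are placeholders.
from oslo_vmware import api, vim_util


def power_on_first_vm(host, user, password):
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=3,
                                   task_poll_interval=0.5)
    try:
        # Find some VirtualMachine managed object to act on; a real driver
        # resolves the instance's own VM reference instead.
        result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                    'VirtualMachine', 1, ['name'])
        vm_ref = result.objects[0].obj
        # Kick off the task, then poll it to completion; the "progress is N%"
        # lines in this log come from the polling inside wait_for_task.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        task_info = session.wait_for_task(task)
        return task_info.state
    finally:
        session.logout()
```

The "Inventory has not changed for provider ... based on inventory data" payloads show how Placement capacity is derived for each resource class: usable capacity is (total - reserved) * allocation_ratio, consumed in step_size units and capped per single allocation by max_unit. A short calculation with the numbers reported above:

```python
# Capacity math for the inventory payload reported in these entries
# (totals, reservations, and ratios copied from the log).
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 75, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:.0f}, "
          f"max {inv['max_unit']} per single allocation")
```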
[ 1016.041919] env[70020]: DEBUG nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.042733] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90e9f0b-5a2a-43a4-905b-7b9531e76bdf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.172086] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.172613] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1016.175499] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.802s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.177385] env[70020]: INFO nova.compute.claims [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1016.540518] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1016.541249] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c2ff3dc8-e53e-4d70-b1e9-6fc501172f81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.549658] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1016.549658] env[70020]: value = "task-3618724" [ 1016.549658] env[70020]: _type = "Task" [ 1016.549658] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.567434] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618724, 'name': CloneVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.568060] env[70020]: INFO nova.compute.manager [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Took 37.79 seconds to build instance. [ 1016.682496] env[70020]: DEBUG nova.compute.utils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1016.689245] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1016.689245] env[70020]: DEBUG nova.network.neutron [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1016.976669] env[70020]: DEBUG nova.policy [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f145e14948f241829c262c46c5321c28', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c8373e835ad4420890442390872c6fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1017.061644] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618724, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.071233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0d8d801a-d823-41e6-862c-54ddf5d84421 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.306s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.187238] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1017.457431] env[70020]: DEBUG nova.network.neutron [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Successfully created port: 02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.563994] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618724, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.579192] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3adc19b-dab2-426d-98a8-8b3fe772ae9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.587021] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fbac8b-7150-4b27-928e-1a0a549651f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.622508] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6a7f2f-78a1-443a-9207-8beb603aa915 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.632743] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012ea4c7-0684-41c3-950b-686296446768 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.646701] env[70020]: DEBUG nova.compute.provider_tree [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.898594] env[70020]: DEBUG nova.compute.manager [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1017.898594] env[70020]: DEBUG nova.compute.manager [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing instance network info cache due to event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1017.898594] env[70020]: DEBUG oslo_concurrency.lockutils [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.898594] env[70020]: DEBUG oslo_concurrency.lockutils [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.898594] env[70020]: DEBUG nova.network.neutron [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.064271] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618724, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.152519] env[70020]: DEBUG nova.scheduler.client.report [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.202687] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1018.220438] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1018.220690] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721763', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'name': 'volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'serial': '53da2c84-5ce4-4b98-93c7-9fe8956ff162'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1018.222722] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d671f95-e16c-4fc9-9f62-9eb5b3c0f1ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.245055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dda5e77-ab3f-41b1-94d4-05bdd64ed869 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.270080] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162/volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.273136] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1018.273377] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.273532] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 
tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1018.273729] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.273844] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1018.273989] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1018.274217] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1018.274440] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1018.274630] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1018.274777] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1018.274947] env[70020]: DEBUG nova.virt.hardware [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1018.275760] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-949f0cff-0a46-4c39-9981-bb6d6c5108a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.290101] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a2986a-94e2-4eb2-8319-670dfed96e49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.299537] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cf9a7a-9134-4142-8e56-c24619956bfc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.303651] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1018.303651] env[70020]: value = "task-3618725" [ 1018.303651] env[70020]: _type = "Task" [ 1018.303651] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.319545] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.563805] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618724, 'name': CloneVM_Task} progress is 95%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.658612] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.659200] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1018.661921] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.888s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.663606] env[70020]: INFO nova.compute.claims [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.762241] env[70020]: DEBUG nova.network.neutron [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updated VIF entry in instance network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1018.762690] env[70020]: DEBUG nova.network.neutron [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.814938] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618725, 'name': ReconfigVM_Task, 'duration_secs': 0.439761} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.815294] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfigured VM instance instance-00000052 to attach disk [datastore1] volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162/volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.820408] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76ad6e92-1cb2-411f-a4ac-64767d6c2195 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.839598] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1018.839598] env[70020]: value = "task-3618726" [ 1018.839598] env[70020]: _type = "Task" [ 1018.839598] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.850494] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618726, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.067262] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618724, 'name': CloneVM_Task, 'duration_secs': 2.379962} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.067580] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Created linked-clone VM from snapshot [ 1019.068332] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4280b033-e320-44ef-b86d-6a81701ee51b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.072997] env[70020]: DEBUG nova.compute.manager [req-f0c3b968-66df-4e3a-9419-bb94b791d3ec req-47d19196-2701-4de8-be40-85cc183d5d75 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Received event network-vif-plugged-02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.073332] env[70020]: DEBUG oslo_concurrency.lockutils [req-f0c3b968-66df-4e3a-9419-bb94b791d3ec req-47d19196-2701-4de8-be40-85cc183d5d75 service nova] Acquiring lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.073569] env[70020]: DEBUG oslo_concurrency.lockutils [req-f0c3b968-66df-4e3a-9419-bb94b791d3ec req-47d19196-2701-4de8-be40-85cc183d5d75 service nova] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.073740] env[70020]: DEBUG oslo_concurrency.lockutils [req-f0c3b968-66df-4e3a-9419-bb94b791d3ec req-47d19196-2701-4de8-be40-85cc183d5d75 service nova] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.073904] env[70020]: DEBUG nova.compute.manager [req-f0c3b968-66df-4e3a-9419-bb94b791d3ec req-47d19196-2701-4de8-be40-85cc183d5d75 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] No waiting events found dispatching network-vif-plugged-02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.074100] env[70020]: WARNING nova.compute.manager [req-f0c3b968-66df-4e3a-9419-bb94b791d3ec req-47d19196-2701-4de8-be40-85cc183d5d75 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Received 
unexpected event network-vif-plugged-02386321-e9cb-45ce-b235-d3c121d3cff1 for instance with vm_state building and task_state spawning. [ 1019.083070] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Uploading image 0c50bc99-3fd8-4b6c-b0e6-fda58d55cc67 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1019.096656] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1019.096934] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d67635f4-4ca6-4a46-8df7-064c19580f0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.108206] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1019.108206] env[70020]: value = "task-3618727" [ 1019.108206] env[70020]: _type = "Task" [ 1019.108206] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.116551] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618727, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.162712] env[70020]: DEBUG nova.network.neutron [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Successfully updated port: 02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1019.170020] env[70020]: DEBUG nova.compute.utils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1019.171426] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1019.171711] env[70020]: DEBUG nova.network.neutron [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1019.217056] env[70020]: DEBUG nova.policy [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ebabdad8aa843f28165fcd167382c60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfa7d3b1f5a14c60b19cde5030c2f0a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1019.269745] env[70020]: DEBUG oslo_concurrency.lockutils [req-89ef3978-0719-4817-833e-5ca14feb4068 req-a5b86136-a211-40f2-a18d-ed9653f52ca8 service nova] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.351127] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618726, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.424288] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.424801] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.519103] env[70020]: DEBUG nova.network.neutron [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Successfully created port: f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1019.616855] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618727, 'name': Destroy_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.664617] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.664775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.664932] env[70020]: DEBUG nova.network.neutron [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.674999] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1019.850528] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618726, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.930925] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1019.997277] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "3a4f2342-58e7-436b-a779-0fa093b52409" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.997412] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "3a4f2342-58e7-436b-a779-0fa093b52409" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.997617] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "3a4f2342-58e7-436b-a779-0fa093b52409-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.997799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "3a4f2342-58e7-436b-a779-0fa093b52409-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.997966] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "3a4f2342-58e7-436b-a779-0fa093b52409-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.005023] env[70020]: INFO nova.compute.manager [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Terminating instance [ 1020.050713] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4953d3f2-53f7-4618-9432-693b96b02068 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.058902] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d40c3e-dad9-4829-8dd0-ba532d5c2fab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.092102] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc61e69-1261-42fb-929a-f1a83610a316 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.100424] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eda913-762f-49cc-a48d-a5265e505e95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.114255] env[70020]: DEBUG nova.compute.provider_tree [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.125171] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618727, 'name': Destroy_Task, 'duration_secs': 0.728517} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.126034] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Destroyed the VM [ 1020.126305] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1020.126560] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-de1e7576-3f51-43b9-87f9-57e3f9b961bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.133185] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1020.133185] env[70020]: value = "task-3618728" [ 1020.133185] env[70020]: _type = "Task" [ 1020.133185] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.141254] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618728, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.201733] env[70020]: DEBUG nova.network.neutron [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.337108] env[70020]: DEBUG nova.network.neutron [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Updating instance_info_cache with network_info: [{"id": "02386321-e9cb-45ce-b235-d3c121d3cff1", "address": "fa:16:3e:c5:9d:a4", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02386321-e9", "ovs_interfaceid": "02386321-e9cb-45ce-b235-d3c121d3cff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.351955] env[70020]: DEBUG oslo_vmware.api [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618726, 'name': ReconfigVM_Task, 'duration_secs': 1.167171} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.352305] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721763', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'name': 'volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'serial': '53da2c84-5ce4-4b98-93c7-9fe8956ff162'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1020.454854] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.514148] env[70020]: DEBUG nova.compute.manager [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.514148] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.516404] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8be1e2-ac7e-4edb-ae0b-a76d09d992c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.523076] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.523376] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c7a86b0-3c2e-4886-b49b-916000189b6a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.532938] env[70020]: DEBUG oslo_vmware.api [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1020.532938] env[70020]: value = "task-3618729" [ 1020.532938] env[70020]: _type = "Task" [ 1020.532938] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.543030] env[70020]: DEBUG oslo_vmware.api [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618729, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.620653] env[70020]: DEBUG nova.scheduler.client.report [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.646792] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618728, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.692819] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1020.725319] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.725796] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.726070] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.726391] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.726651] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.726896] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.727235] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.727493] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.727767] env[70020]: DEBUG nova.virt.hardware [None 
req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.728268] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.728268] env[70020]: DEBUG nova.virt.hardware [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.730190] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bd7c01-930e-4ca1-807c-94fec4ed5f4b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.742270] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a852e76-1afc-4b47-b6a7-717c34d46513 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.841076] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.841076] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Instance network_info: |[{"id": "02386321-e9cb-45ce-b235-d3c121d3cff1", "address": "fa:16:3e:c5:9d:a4", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02386321-e9", "ovs_interfaceid": "02386321-e9cb-45ce-b235-d3c121d3cff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1020.841308] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 
tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:9d:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02386321-e9cb-45ce-b235-d3c121d3cff1', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.850350] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1020.850350] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.850478] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f9f6839-b3c0-4125-bd91-d6bf5719b9e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.872915] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.872915] env[70020]: value = "task-3618730" [ 1020.872915] env[70020]: _type = "Task" [ 1020.872915] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.880976] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618730, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.035090] env[70020]: DEBUG nova.network.neutron [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Successfully updated port: f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.047127] env[70020]: DEBUG oslo_vmware.api [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618729, 'name': PowerOffVM_Task, 'duration_secs': 0.445139} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.048829] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.049289] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.049970] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4cdbfea1-4e4a-4fea-a26a-5a28301faaa3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.118801] env[70020]: DEBUG nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Received event network-changed-02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.120025] env[70020]: DEBUG nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Refreshing instance network info cache due to event network-changed-02386321-e9cb-45ce-b235-d3c121d3cff1. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1021.120025] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Acquiring lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.120025] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Acquired lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.120025] env[70020]: DEBUG nova.network.neutron [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Refreshing network info cache for port 02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1021.122563] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.122790] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.123008] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleting the datastore file [datastore2] 3a4f2342-58e7-436b-a779-0fa093b52409 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.123831] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22f97cb4-ad40-40a4-a844-125d00418947 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.127307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.127808] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1021.130679] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.696s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.130979] env[70020]: DEBUG nova.objects.instance [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lazy-loading 'resources' on Instance uuid 58dded95-033a-46d7-b02e-5b2f2551234c {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.139600] env[70020]: DEBUG oslo_vmware.api [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1021.139600] env[70020]: value = "task-3618732" [ 1021.139600] env[70020]: _type = "Task" [ 1021.139600] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.146478] env[70020]: DEBUG oslo_vmware.api [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618728, 'name': RemoveSnapshot_Task, 'duration_secs': 0.833381} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.147127] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1021.153395] env[70020]: DEBUG oslo_vmware.api [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618732, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.383062] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618730, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.403550] env[70020]: DEBUG nova.objects.instance [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid da07cb36-244f-4f48-a5b6-8d00324c1edf {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.536457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-056141e3-5628-4451-bd25-f4fa15edd11e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.536457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-056141e3-5628-4451-bd25-f4fa15edd11e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.536457] env[70020]: DEBUG nova.network.neutron [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1021.636848] env[70020]: DEBUG nova.compute.utils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1021.640908] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1021.641627] env[70020]: DEBUG nova.network.neutron [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1021.652608] env[70020]: DEBUG oslo_vmware.api [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154328} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.653019] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.653019] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.653199] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.653485] env[70020]: INFO nova.compute.manager [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1021.653645] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.653846] env[70020]: DEBUG nova.compute.manager [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.653937] env[70020]: DEBUG nova.network.neutron [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.656732] env[70020]: WARNING nova.compute.manager [None req-c8762224-966d-4066-85bb-f5163bba00db tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Image not found during snapshot: nova.exception.ImageNotFound: Image 0c50bc99-3fd8-4b6c-b0e6-fda58d55cc67 could not be found. 
[ 1021.694766] env[70020]: DEBUG nova.policy [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1021.884490] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618730, 'name': CreateVM_Task, 'duration_secs': 0.608763} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.884686] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.885451] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.885712] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.885943] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1021.886212] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d277ecf-528d-428f-bdec-2b42ded3a934 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.894210] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1021.894210] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528aa453-2056-1696-9f2e-437233418785" [ 1021.894210] env[70020]: _type = "Task" [ 1021.894210] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.905315] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528aa453-2056-1696-9f2e-437233418785, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.908481] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e3dbd58-1bc5-49f1-b8b1-a14866388907 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.309s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.959851] env[70020]: DEBUG nova.network.neutron [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Updated VIF entry in instance network info cache for port 02386321-e9cb-45ce-b235-d3c121d3cff1. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1021.960294] env[70020]: DEBUG nova.network.neutron [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Updating instance_info_cache with network_info: [{"id": "02386321-e9cb-45ce-b235-d3c121d3cff1", "address": "fa:16:3e:c5:9d:a4", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02386321-e9", "ovs_interfaceid": "02386321-e9cb-45ce-b235-d3c121d3cff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.992995] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5596f441-594a-4393-9a31-44437a1384b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.000975] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b981bd-0579-4844-a2e5-97a622fb4d45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.037620] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-15194b01-9297-45ca-95d2-513f5092ad1a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.047394] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187e249a-8465-47ff-9b69-fb468176db46 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.061212] env[70020]: DEBUG nova.compute.provider_tree [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1022.093427] env[70020]: DEBUG nova.network.neutron [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1022.127326] env[70020]: DEBUG nova.network.neutron [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Successfully created port: d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.136279] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "42d20396-883d-4141-a226-61f476057cbe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.136514] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "42d20396-883d-4141-a226-61f476057cbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.136777] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "42d20396-883d-4141-a226-61f476057cbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.136980] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "42d20396-883d-4141-a226-61f476057cbe-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.137170] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "42d20396-883d-4141-a226-61f476057cbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.140403] env[70020]: INFO nova.compute.manager [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Terminating instance [ 1022.141827] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1022.260780] env[70020]: DEBUG nova.compute.manager [req-c2ca49ba-18ff-4266-b732-a8cb09fcf06e req-4f0e5249-83c9-4952-b921-bcac56b4d2b1 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Received event network-vif-deleted-d1869b01-6eea-468c-ac71-153c8eeda8ca {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.260974] env[70020]: INFO nova.compute.manager [req-c2ca49ba-18ff-4266-b732-a8cb09fcf06e req-4f0e5249-83c9-4952-b921-bcac56b4d2b1 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Neutron deleted interface d1869b01-6eea-468c-ac71-153c8eeda8ca; detaching it from the instance and deleting it from the info cache [ 1022.261157] env[70020]: DEBUG nova.network.neutron [req-c2ca49ba-18ff-4266-b732-a8cb09fcf06e req-4f0e5249-83c9-4952-b921-bcac56b4d2b1 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.300369] env[70020]: DEBUG nova.network.neutron [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Updating instance_info_cache with network_info: [{"id": "f3bb6c02-a473-447c-a316-a09dfd62af88", "address": "fa:16:3e:dd:b4:67", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf3bb6c02-a4", "ovs_interfaceid": "f3bb6c02-a473-447c-a316-a09dfd62af88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.407235] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528aa453-2056-1696-9f2e-437233418785, 'name': SearchDatastore_Task, 'duration_secs': 0.026391} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.407235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.408034] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.408034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.408034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.408271] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.408572] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93791b41-ef0b-430a-a845-3aa72d7ead5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.417284] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.417470] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 
tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.418196] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f30481a-0eac-4d11-9709-227fc969dbe7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.423059] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1022.423059] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bb1735-5b2b-5299-dd59-be98728ce8de" [ 1022.423059] env[70020]: _type = "Task" [ 1022.423059] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.432506] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bb1735-5b2b-5299-dd59-be98728ce8de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.468300] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Releasing lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.468300] env[70020]: DEBUG nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Received event network-vif-plugged-f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.468300] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Acquiring lock "056141e3-5628-4451-bd25-f4fa15edd11e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.468300] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Lock "056141e3-5628-4451-bd25-f4fa15edd11e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.468300] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Lock "056141e3-5628-4451-bd25-f4fa15edd11e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.468300] env[70020]: DEBUG nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] 
[instance: 056141e3-5628-4451-bd25-f4fa15edd11e] No waiting events found dispatching network-vif-plugged-f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1022.468300] env[70020]: WARNING nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Received unexpected event network-vif-plugged-f3bb6c02-a473-447c-a316-a09dfd62af88 for instance with vm_state building and task_state spawning. [ 1022.468300] env[70020]: DEBUG nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Received event network-changed-f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.468300] env[70020]: DEBUG nova.compute.manager [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Refreshing instance network info cache due to event network-changed-f3bb6c02-a473-447c-a316-a09dfd62af88. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1022.468300] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Acquiring lock "refresh_cache-056141e3-5628-4451-bd25-f4fa15edd11e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.585784] env[70020]: ERROR nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] [req-c617b644-91b8-4004-8eae-86e6a4f88e8a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c617b644-91b8-4004-8eae-86e6a4f88e8a"}]} [ 1022.607843] env[70020]: DEBUG nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1022.621578] env[70020]: DEBUG nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1022.621903] env[70020]: DEBUG nova.compute.provider_tree [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1022.634154] env[70020]: DEBUG nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1022.651523] env[70020]: DEBUG nova.compute.manager [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1022.651726] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.652830] env[70020]: DEBUG nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1022.655301] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d149ee66-7efd-4b18-acfd-6c0ba21d580e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.662942] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.663194] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32412082-61b7-4133-b4c7-e36aef2ce725 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.670388] env[70020]: DEBUG oslo_vmware.api [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1022.670388] env[70020]: value = "task-3618733" [ 1022.670388] env[70020]: _type = "Task" [ 1022.670388] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.682165] env[70020]: DEBUG oslo_vmware.api [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618733, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.688582] env[70020]: DEBUG nova.network.neutron [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.764041] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-631754c8-8996-4506-b4cc-d0a23ddce2a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.773302] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b785a6-9359-45b3-ae08-94f79425fcc3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.806445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-056141e3-5628-4451-bd25-f4fa15edd11e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.806733] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Instance network_info: |[{"id": "f3bb6c02-a473-447c-a316-a09dfd62af88", "address": "fa:16:3e:dd:b4:67", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bb6c02-a4", "ovs_interfaceid": "f3bb6c02-a473-447c-a316-a09dfd62af88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1022.807110] env[70020]: DEBUG nova.compute.manager [req-c2ca49ba-18ff-4266-b732-a8cb09fcf06e req-4f0e5249-83c9-4952-b921-bcac56b4d2b1 service nova] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Detach interface failed, port_id=d1869b01-6eea-468c-ac71-153c8eeda8ca, reason: Instance 3a4f2342-58e7-436b-a779-0fa093b52409 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1022.809995] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Acquired lock "refresh_cache-056141e3-5628-4451-bd25-f4fa15edd11e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.810209] env[70020]: DEBUG nova.network.neutron [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Refreshing network info cache for port f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.811351] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:b4:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3bb6c02-a473-447c-a316-a09dfd62af88', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.818740] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.820011] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1022.820254] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da96ee85-ba00-4850-8b1d-1ffecdcf4421 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.842739] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.842739] env[70020]: value = "task-3618734" [ 1022.842739] env[70020]: _type = "Task" [ 1022.842739] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.854737] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618734, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.858803] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.859040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.936938] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bb1735-5b2b-5299-dd59-be98728ce8de, 'name': SearchDatastore_Task, 'duration_secs': 0.008641} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.940950] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-189cb696-c1bb-44db-9889-9fc8a216b9dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.946896] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1022.946896] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52148e97-54a3-5072-f106-8b7ed02bf0cb" [ 1022.946896] env[70020]: _type = "Task" [ 1022.946896] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.958823] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52148e97-54a3-5072-f106-8b7ed02bf0cb, 'name': SearchDatastore_Task} progress is 0%. 
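The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" lines throughout this section come from oslo.concurrency's lock helpers, which log on entry and exit. A minimal sketch of that usage pattern follows; the function bodies are placeholders and Nova wraps these primitives in its own helpers, so this is illustrative only.

    from oslo_concurrency import lockutils

    # Context-manager form: serialize work on a per-instance key, as in
    # "Lock 'da07cb36-...' acquired by ...do_reserve" above.
    def do_reserve(instance_uuid):
        with lockutils.lock(instance_uuid):
            pass  # reserve the block device name while holding the lock

    # Decorator form: one shared lock name for a whole code path, as with the
    # "compute_resources" lock used by the resource tracker.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # resource accounting happens while the lock is held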
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.017030] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd77ed30-bd6c-4e3d-9120-7941ef771bf2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.023995] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d845ed-7379-4042-95f7-c02f4a842bce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.056998] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9370ba8f-0a0a-4c40-80dd-b9c42e962785 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.066586] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc04ee9-e973-4778-8ee4-d2133c58bd69 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.081114] env[70020]: DEBUG nova.compute.provider_tree [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.154173] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1023.181710] env[70020]: DEBUG oslo_vmware.api [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618733, 'name': PowerOffVM_Task, 'duration_secs': 0.229284} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.183842] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1023.184083] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.184243] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1023.184460] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.184613] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1023.184766] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1023.184983] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1023.185163] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1023.185328] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1023.185490] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1023.185658] env[70020]: DEBUG nova.virt.hardware [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1023.185941] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.186120] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1023.186841] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b0df6e-aef1-470c-a506-6be467d12fa8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.190134] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7602d816-d436-4e85-86d7-719c818f13a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.191915] env[70020]: INFO nova.compute.manager [-] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Took 1.54 seconds to deallocate network for instance. 
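The nova.virt.hardware lines above walk a 1-vCPU m1.nano flavor through topology selection: with no flavor or image limits the caps default to 65536 sockets/cores/threads, and the only (sockets, cores, threads) combination whose product equals 1 is (1, 1, 1), hence "Got 1 possible topologies". A brute-force illustration of that enumeration is sketched below; it is not Nova's _get_possible_cpu_topologies implementation, and the small caps are only to keep the example cheap.

    from itertools import product

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) triples matching the vCPU count."""
        return [(s, c, t)
                for s, c, t in product(range(1, max_sockets + 1),
                                       range(1, max_cores + 1),
                                       range(1, max_threads + 1))
                if s * c * t == vcpus]

    # For the 1-vCPU flavor in the log (the real limits were 65536 each):
    print(possible_topologies(1, 8, 8, 2))   # [(1, 1, 1)]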
[ 1023.199741] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e443da-8862-4453-8272-94e1d30998aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.255594] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1023.255852] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1023.256055] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleting the datastore file [datastore1] 42d20396-883d-4141-a226-61f476057cbe {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.256321] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9875f768-5b41-454c-9706-1e0a8bc0cb32 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.263161] env[70020]: DEBUG oslo_vmware.api [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for the task: (returnval){ [ 1023.263161] env[70020]: value = "task-3618736" [ 1023.263161] env[70020]: _type = "Task" [ 1023.263161] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.271312] env[70020]: DEBUG oslo_vmware.api [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.353583] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618734, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.361791] env[70020]: DEBUG nova.compute.utils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.459160] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52148e97-54a3-5072-f106-8b7ed02bf0cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010786} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.459160] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.459333] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.459597] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f732fa4-9120-4745-bcbf-8bff85a3a4b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.466392] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1023.466392] env[70020]: value = "task-3618737" [ 1023.466392] env[70020]: _type = "Task" [ 1023.466392] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.474337] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.574561] env[70020]: DEBUG nova.network.neutron [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Updated VIF entry in instance network info cache for port f3bb6c02-a473-447c-a316-a09dfd62af88. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.574561] env[70020]: DEBUG nova.network.neutron [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Updating instance_info_cache with network_info: [{"id": "f3bb6c02-a473-447c-a316-a09dfd62af88", "address": "fa:16:3e:dd:b4:67", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bb6c02-a4", "ovs_interfaceid": "f3bb6c02-a473-447c-a316-a09dfd62af88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.625164] env[70020]: DEBUG nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1023.625164] env[70020]: DEBUG nova.compute.provider_tree [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 124 to 125 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1023.625164] env[70020]: DEBUG nova.compute.provider_tree [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.705844] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.742651] env[70020]: DEBUG nova.network.neutron [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Successfully updated port: d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1023.776415] env[70020]: DEBUG oslo_vmware.api [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Task: {'id': task-3618736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13073} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.776647] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.776842] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.777059] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.777292] env[70020]: INFO nova.compute.manager [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] [instance: 42d20396-883d-4141-a226-61f476057cbe] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1023.777537] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
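The 409 "placement.concurrent_update" error earlier in this section, followed by the inventory refresh and the generation bump from 124 to 125, is Placement's optimistic-concurrency cycle: every inventory write must carry the provider generation it was based on, and a conflict means another writer got there first, so the client re-reads and retries. A hedged sketch of that retry loop against the Placement REST API is shown below; the endpoint URL and the bare requests usage (no auth token or keystoneauth session) are stand-ins for illustration.

    import requests

    PLACEMENT = "http://placement.example/placement"          # stand-in endpoint
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}     # auth omitted here

    def set_inventories(rp_uuid, inventories, retries=3):
        """PUT an inventory dict like the one logged above, retrying on a
        resource provider generation conflict (HTTP 409)."""
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:       # success or a non-conflict error
                resp.raise_for_status()
                return resp.json()
            # 409: another writer bumped the generation; refresh and try again.
        raise RuntimeError(f"inventory update for {rp_uuid} kept conflicting")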
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1023.777791] env[70020]: DEBUG nova.compute.manager [-] [instance: 42d20396-883d-4141-a226-61f476057cbe] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1023.777910] env[70020]: DEBUG nova.network.neutron [-] [instance: 42d20396-883d-4141-a226-61f476057cbe] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.859698] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618734, 'name': CreateVM_Task, 'duration_secs': 0.533248} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.859978] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1023.861022] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.861310] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.861827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1023.862227] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d70908c-d36a-4094-b038-943654892488 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.865507] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.873089] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1023.873089] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c1d9e9-ea29-4747-8db4-e4bdc53c9e8f" [ 1023.873089] env[70020]: _type = "Task" [ 1023.873089] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.887095] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c1d9e9-ea29-4747-8db4-e4bdc53c9e8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.949924] env[70020]: DEBUG nova.compute.manager [req-a28de687-f973-49a8-b287-fdaff520469d req-d114627a-fc53-4e48-a926-6414d6defa7e service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Received event network-vif-plugged-d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1023.950248] env[70020]: DEBUG oslo_concurrency.lockutils [req-a28de687-f973-49a8-b287-fdaff520469d req-d114627a-fc53-4e48-a926-6414d6defa7e service nova] Acquiring lock "0453722d-258f-49e3-b61e-f1081eb465c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.950817] env[70020]: DEBUG oslo_concurrency.lockutils [req-a28de687-f973-49a8-b287-fdaff520469d req-d114627a-fc53-4e48-a926-6414d6defa7e service nova] Lock "0453722d-258f-49e3-b61e-f1081eb465c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.951216] env[70020]: DEBUG oslo_concurrency.lockutils [req-a28de687-f973-49a8-b287-fdaff520469d req-d114627a-fc53-4e48-a926-6414d6defa7e service nova] Lock "0453722d-258f-49e3-b61e-f1081eb465c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.951757] env[70020]: DEBUG nova.compute.manager [req-a28de687-f973-49a8-b287-fdaff520469d req-d114627a-fc53-4e48-a926-6414d6defa7e service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] No waiting events found dispatching network-vif-plugged-d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1023.951990] env[70020]: WARNING nova.compute.manager [req-a28de687-f973-49a8-b287-fdaff520469d req-d114627a-fc53-4e48-a926-6414d6defa7e service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Received unexpected event network-vif-plugged-d9bd6893-0205-4ae9-9f12-07dbc827824e for instance with vm_state building and task_state spawning. [ 1023.977601] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506443} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.977957] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.978218] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.978563] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d8f1054-62ff-4065-963c-0e1e3ca18d87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.986026] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1023.986026] env[70020]: value = "task-3618738" [ 1023.986026] env[70020]: _type = "Task" [ 1023.986026] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.994996] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618738, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.077083] env[70020]: DEBUG oslo_concurrency.lockutils [req-bd2e9cd4-5553-4b1a-ba87-d2575f7af180 req-1ae14e97-27fd-47a7-8ed6-2e3458065771 service nova] Releasing lock "refresh_cache-056141e3-5628-4451-bd25-f4fa15edd11e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.130048] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.999s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.134646] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.761s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.134914] env[70020]: DEBUG nova.objects.instance [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lazy-loading 'resources' on Instance uuid 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.160945] env[70020]: INFO nova.scheduler.client.report [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Deleted allocations for instance 58dded95-033a-46d7-b02e-5b2f2551234c [ 1024.245826] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-0453722d-258f-49e3-b61e-f1081eb465c6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.245988] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-0453722d-258f-49e3-b61e-f1081eb465c6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.246196] env[70020]: DEBUG nova.network.neutron [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.386320] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c1d9e9-ea29-4747-8db4-e4bdc53c9e8f, 'name': SearchDatastore_Task, 'duration_secs': 0.026025} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.386599] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.386862] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.387080] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.387250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.387486] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.387702] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-873bea89-3dc3-4ffc-8816-0b94191cd688 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.396828] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.397063] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.397800] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5afd19af-a8a2-4f70-a805-5c16e8ec0f0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.404953] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1024.404953] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524af77a-883e-9e69-beae-bee23830e049" [ 1024.404953] env[70020]: _type = "Task" [ 1024.404953] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.413170] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524af77a-883e-9e69-beae-bee23830e049, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.495827] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618738, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.369105} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.496129] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1024.496980] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba35fd85-aa87-408c-baf4-8164259192ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.523865] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.523865] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6cc7638-5e10-481b-afc6-0f0ae1299a35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.545112] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1024.545112] env[70020]: value = "task-3618739" [ 1024.545112] env[70020]: _type = "Task" [ 1024.545112] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.553693] env[70020]: DEBUG nova.network.neutron [-] [instance: 42d20396-883d-4141-a226-61f476057cbe] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.555012] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618739, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.671076] env[70020]: DEBUG oslo_concurrency.lockutils [None req-942f7918-0499-4426-8c27-ae1ffdafc531 tempest-FloatingIPsAssociationNegativeTestJSON-925045144 tempest-FloatingIPsAssociationNegativeTestJSON-925045144-project-member] Lock "58dded95-033a-46d7-b02e-5b2f2551234c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.755s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.783673] env[70020]: DEBUG nova.network.neutron [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1024.918432] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524af77a-883e-9e69-beae-bee23830e049, 'name': SearchDatastore_Task, 'duration_secs': 0.00967} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.919569] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eeab549f-60be-4909-b477-a2e1cafd7972 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.924779] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1024.924779] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c17832-e17c-64ca-fe90-91eb7f8e9726" [ 1024.924779] env[70020]: _type = "Task" [ 1024.924779] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.935203] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c17832-e17c-64ca-fe90-91eb7f8e9726, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.947235] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.947561] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.947719] env[70020]: INFO nova.compute.manager [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Attaching volume 0c646aab-21b0-4b8c-9b2a-36335a4b1275 to /dev/sdc [ 1024.953099] env[70020]: DEBUG nova.network.neutron [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Updating instance_info_cache with network_info: [{"id": "d9bd6893-0205-4ae9-9f12-07dbc827824e", "address": "fa:16:3e:d6:ff:45", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9bd6893-02", "ovs_interfaceid": "d9bd6893-0205-4ae9-9f12-07dbc827824e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.956843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e4b5e0-b418-4e2b-8ee2-ba44546081fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.966542] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef03198-5bd6-460a-ab9b-59ddc1d952a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.020423] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c8f64876-19fd-4a58-83ac-3ab8efac5219 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.025993] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6409b7d7-5153-48c5-81ed-da4046652ef0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.038495] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd91ec0-ec6f-4836-abf7-ae907d3919ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.043709] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e0d8e3-a459-4e70-b96b-79c2a21e7b43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.058376] env[70020]: INFO nova.compute.manager [-] [instance: 42d20396-883d-4141-a226-61f476057cbe] Took 1.28 seconds to deallocate network for instance. [ 1025.058905] env[70020]: DEBUG nova.compute.provider_tree [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.065559] env[70020]: DEBUG nova.virt.block_device [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating existing volume attachment record: a59b159d-c822-489d-9c3b-1c68c975ce02 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1025.074682] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.442323] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c17832-e17c-64ca-fe90-91eb7f8e9726, 'name': SearchDatastore_Task, 'duration_secs': 0.041036} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.442323] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.442323] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 056141e3-5628-4451-bd25-f4fa15edd11e/056141e3-5628-4451-bd25-f4fa15edd11e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.442323] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1369a3d-142a-44a2-b06c-95e13a4aa63d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.451477] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1025.451477] env[70020]: value = "task-3618741" [ 1025.451477] env[70020]: _type = "Task" [ 1025.451477] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.462827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-0453722d-258f-49e3-b61e-f1081eb465c6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.462827] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Instance network_info: |[{"id": "d9bd6893-0205-4ae9-9f12-07dbc827824e", "address": "fa:16:3e:d6:ff:45", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9bd6893-02", "ovs_interfaceid": "d9bd6893-0205-4ae9-9f12-07dbc827824e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1025.465149] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:ff:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9bd6893-0205-4ae9-9f12-07dbc827824e', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.473270] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.473640] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.473992] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.474350] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15b464ca-bf2f-4573-9635-c79ca71d16fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.494899] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.494899] env[70020]: value = "task-3618742" [ 1025.494899] env[70020]: _type = "Task" [ 1025.494899] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.505172] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618742, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.556960] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618739, 'name': ReconfigVM_Task, 'duration_secs': 0.944388} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.557269] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Reconfigured VM instance instance-00000059 to attach disk [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.557955] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5234d909-4d70-4f23-8e9f-4cdb74025971 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.564450] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1025.564450] env[70020]: value = "task-3618743" [ 1025.564450] env[70020]: _type = "Task" [ 1025.564450] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.576319] env[70020]: DEBUG nova.scheduler.client.report [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.580201] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.580481] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618743, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.963379] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475679} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.963646] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 056141e3-5628-4451-bd25-f4fa15edd11e/056141e3-5628-4451-bd25-f4fa15edd11e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.963857] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.964124] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14eba3d5-d754-4b83-a04e-bdf172d01c3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.970327] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1025.970327] env[70020]: value = "task-3618744" [ 1025.970327] env[70020]: _type = "Task" [ 1025.970327] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.981224] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618744, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.004633] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618742, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.070808] env[70020]: DEBUG nova.compute.manager [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Received event network-changed-d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1026.071008] env[70020]: DEBUG nova.compute.manager [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Refreshing instance network info cache due to event network-changed-d9bd6893-0205-4ae9-9f12-07dbc827824e. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1026.071260] env[70020]: DEBUG oslo_concurrency.lockutils [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] Acquiring lock "refresh_cache-0453722d-258f-49e3-b61e-f1081eb465c6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.071372] env[70020]: DEBUG oslo_concurrency.lockutils [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] Acquired lock "refresh_cache-0453722d-258f-49e3-b61e-f1081eb465c6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.071527] env[70020]: DEBUG nova.network.neutron [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Refreshing network info cache for port d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.080645] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618743, 'name': Rename_Task, 'duration_secs': 0.174382} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.084134] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.089351] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1026.091507] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.206s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.092705] env[70020]: INFO nova.compute.claims [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.095889] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90a7bce3-8937-40a8-b766-2c8fcf892db8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.104475] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 
1026.104475] env[70020]: value = "task-3618745" [ 1026.104475] env[70020]: _type = "Task" [ 1026.104475] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.115192] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618745, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.130929] env[70020]: INFO nova.scheduler.client.report [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleted allocations for instance 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6 [ 1026.484534] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067179} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.485366] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.486936] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa8e821-ec65-4f37-ba82-14d5c759ff0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.520209] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 056141e3-5628-4451-bd25-f4fa15edd11e/056141e3-5628-4451-bd25-f4fa15edd11e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.521759] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34343761-fe7e-4279-b2cd-260d213af726 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.542049] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618742, 'name': CreateVM_Task, 'duration_secs': 0.551973} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.544513] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.544513] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1026.544513] env[70020]: value = "task-3618746" [ 1026.544513] env[70020]: _type = "Task" [ 1026.544513] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.544513] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.544513] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.544864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1026.544954] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b27046f3-de8f-4569-a57e-a955b4ea3811 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.553743] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1026.553743] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5226a110-a1b4-4a32-51bb-e125b3d68fca" [ 1026.553743] env[70020]: _type = "Task" [ 1026.553743] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.557044] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618746, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.564826] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5226a110-a1b4-4a32-51bb-e125b3d68fca, 'name': SearchDatastore_Task, 'duration_secs': 0.010209} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.565110] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.569016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.569016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.569016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.569016] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.569016] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d077732c-da18-40a2-97b4-1cea07bf8369 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.573587] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.573842] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.576580] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05fd1332-a995-4a26-a919-dae2fb7951c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.581669] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1026.581669] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5204646e-ebd5-f625-a5fd-eb6e19af7975" [ 1026.581669] env[70020]: _type = "Task" [ 1026.581669] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.589265] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5204646e-ebd5-f625-a5fd-eb6e19af7975, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.613708] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618745, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.637602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f736da04-0083-4d76-8f10-820dc62cbe19 tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.846s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.807846] env[70020]: DEBUG nova.network.neutron [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Updated VIF entry in instance network info cache for port d9bd6893-0205-4ae9-9f12-07dbc827824e. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.808220] env[70020]: DEBUG nova.network.neutron [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Updating instance_info_cache with network_info: [{"id": "d9bd6893-0205-4ae9-9f12-07dbc827824e", "address": "fa:16:3e:d6:ff:45", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9bd6893-02", "ovs_interfaceid": "d9bd6893-0205-4ae9-9f12-07dbc827824e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.054941] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618746, 'name': ReconfigVM_Task, 'duration_secs': 0.308408} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.055192] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 056141e3-5628-4451-bd25-f4fa15edd11e/056141e3-5628-4451-bd25-f4fa15edd11e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.055889] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15748cb1-ca17-447c-9e04-e2fce247c1b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.062267] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1027.062267] env[70020]: value = "task-3618747" [ 1027.062267] env[70020]: _type = "Task" [ 1027.062267] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.071386] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618747, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.091497] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5204646e-ebd5-f625-a5fd-eb6e19af7975, 'name': SearchDatastore_Task, 'duration_secs': 0.011062} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.092409] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f06fd1b-7228-4c97-a83a-d4d6a1d18129 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.098169] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1027.098169] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526095ce-25b5-33f5-bfd8-2372739423a5" [ 1027.098169] env[70020]: _type = "Task" [ 1027.098169] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.112800] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526095ce-25b5-33f5-bfd8-2372739423a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009543} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.113406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.113715] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0453722d-258f-49e3-b61e-f1081eb465c6/0453722d-258f-49e3-b61e-f1081eb465c6.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.113969] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af64e8c6-7f6f-450e-aaa6-79d6f6a27a1a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.118529] env[70020]: DEBUG oslo_vmware.api [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618745, 'name': PowerOnVM_Task, 'duration_secs': 0.832007} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.119092] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1027.119300] env[70020]: INFO nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Took 8.92 seconds to spawn the instance on the hypervisor. [ 1027.119478] env[70020]: DEBUG nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.120248] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b964f6d-2682-4a4a-bb26-50636a3bcd11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.123981] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1027.123981] env[70020]: value = "task-3618748" [ 1027.123981] env[70020]: _type = "Task" [ 1027.123981] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.139900] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618748, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.312563] env[70020]: DEBUG oslo_concurrency.lockutils [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] Releasing lock "refresh_cache-0453722d-258f-49e3-b61e-f1081eb465c6" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.312930] env[70020]: DEBUG nova.compute.manager [req-2bc92af9-bae3-4efc-8d73-cb269f36e66d req-6c2b30d1-81a2-41c7-9c3d-a08097facd2f service nova] [instance: 42d20396-883d-4141-a226-61f476057cbe] Received event network-vif-deleted-06d51c44-f553-4e0b-9d14-0c9f56972fee {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.398578] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.398578] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.398732] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.398866] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.399077] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.401548] env[70020]: INFO nova.compute.manager [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Terminating instance [ 1027.419336] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b96fdc76-d512-4109-8d09-06a7e792cc05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.428903] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3e6ef7-ef48-4ab7-9df9-0ad878693a15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.465047] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae145832-af38-4647-86e5-cbac5b741e6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.473468] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a08234a-38cd-46a9-9eff-c3000d5c09fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.490977] env[70020]: DEBUG nova.compute.provider_tree [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.578758] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618747, 'name': Rename_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.634586] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493375} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.634856] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 0453722d-258f-49e3-b61e-f1081eb465c6/0453722d-258f-49e3-b61e-f1081eb465c6.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.635120] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.635371] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87bae2dd-4fe5-429a-87e7-3e4845c9e454 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.643718] env[70020]: INFO nova.compute.manager [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Took 39.88 seconds to build instance. [ 1027.647492] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1027.647492] env[70020]: value = "task-3618750" [ 1027.647492] env[70020]: _type = "Task" [ 1027.647492] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.656386] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618750, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.908857] env[70020]: DEBUG nova.compute.manager [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.909513] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.910076] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98acb3f1-fdc8-414d-9043-cdc12de5fb0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.920578] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.920867] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fb9b58f-d24e-4a4f-9836-80c3f00ddab8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.927697] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 1027.927697] env[70020]: value = "task-3618751" [ 1027.927697] env[70020]: _type = "Task" [ 1027.927697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.936266] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618751, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.994770] env[70020]: DEBUG nova.scheduler.client.report [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.073863] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618747, 'name': Rename_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.145827] env[70020]: DEBUG oslo_concurrency.lockutils [None req-784e6cd5-ab87-4835-ad76-fa0be54c2875 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 41.388s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.157020] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618750, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098147} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.157768] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1028.158831] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d505917f-7047-4896-a5d5-c36c692227eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.183131] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 0453722d-258f-49e3-b61e-f1081eb465c6/0453722d-258f-49e3-b61e-f1081eb465c6.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.183835] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3ead049-1baf-4eaf-84ed-9f01fe9093c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.203252] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1028.203252] env[70020]: value = "task-3618752" [ 1028.203252] env[70020]: _type = "Task" [ 1028.203252] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.212133] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618752, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.437448] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618751, 'name': PowerOffVM_Task} progress is 100%.
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.501663] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.502173] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1028.504862] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.038s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.506305] env[70020]: INFO nova.compute.claims [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.523549] env[70020]: INFO nova.compute.manager [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Rescuing [ 1028.523801] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.524348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.527350] env[70020]: DEBUG nova.network.neutron [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.575447] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618747, 'name': Rename_Task, 'duration_secs': 1.151026} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.575698] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.575953] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5dcb160-9a45-4bbd-b130-95082e59e498 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.582016] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1028.582016] env[70020]: value = "task-3618753" [ 1028.582016] env[70020]: _type = "Task" [ 1028.582016] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.590765] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618753, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.713417] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.940527] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618751, 'name': PowerOffVM_Task, 'duration_secs': 0.599364} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.944995] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.944995] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.944995] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9de615d8-0fe3-4a40-a0fa-e153bf94da72 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.011016] env[70020]: DEBUG nova.compute.utils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1029.018736] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1029.019096] env[70020]: DEBUG nova.network.neutron [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1029.021059] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.021280] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.021478] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleting the datastore file [datastore2] 45926a02-d0fe-4274-ba47-b97b3e12e4cd {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.022032] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65c212b2-05e4-4f92-a4de-44cab2b564ef {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.028967] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for the task: (returnval){ [ 1029.028967] env[70020]: value = "task-3618755" [ 1029.028967] env[70020]: _type = "Task" [ 1029.028967] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.045556] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618755, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.092367] env[70020]: DEBUG oslo_vmware.api [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618753, 'name': PowerOnVM_Task, 'duration_secs': 0.484789} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.092662] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.092964] env[70020]: INFO nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Took 8.40 seconds to spawn the instance on the hypervisor. 
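[editor's note] The records around this point repeat the oslo.vmware task pattern: a vSphere *_Task method is invoked, the driver logs "Waiting for the task ... to complete" (wait_for_task, api.py:397), polls "Task ... progress is N%" (_poll_task, api.py:434), and finally reports the task completed with a duration_secs value (api.py:444). The stand-alone Python sketch below only illustrates that poll-until-done shape; it is not oslo.vmware's actual implementation, and simulated_task, its timings, and the printed wording are invented for the example.

    import time

    def simulated_task(started_at, duration=2.0):
        # Stand-in for a vCenter task handle: report percent done, then success.
        elapsed = time.monotonic() - started_at
        if elapsed >= duration:
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": int(100 * elapsed / duration)}

    def wait_for_task(poll, interval=0.5):
        # Poll the task until it succeeds, mimicking the progress/duration logging above.
        started_at = time.monotonic()
        while True:
            info = poll(started_at)
            if info["state"] == "success":
                print("Task completed successfully. duration_secs=%.6f"
                      % (time.monotonic() - started_at))
                return info
            print("Task progress is %d%%." % info["progress"])
            time.sleep(interval)

    if __name__ == "__main__":
        wait_for_task(simulated_task)

In the log itself this loop is driven by oslo_vmware.api, which is why every progress record cites api.py:434 and every completion record cites api.py:444.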
[ 1029.093209] env[70020]: DEBUG nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.094050] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abdafaf-395b-4ee1-82b0-b33961dc413a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.104390] env[70020]: DEBUG nova.policy [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afb49d648a70426fa7c39789e51ab625', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7aae0b70f9d465ebcb9defe385fa434', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1029.213385] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618752, 'name': ReconfigVM_Task, 'duration_secs': 0.84797} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.213657] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 0453722d-258f-49e3-b61e-f1081eb465c6/0453722d-258f-49e3-b61e-f1081eb465c6.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.214317] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c97d34b-c939-4ce2-abc3-6d0c13613c2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.224086] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1029.224086] env[70020]: value = "task-3618756" [ 1029.224086] env[70020]: _type = "Task" [ 1029.224086] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.232608] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618756, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.496657] env[70020]: DEBUG nova.network.neutron [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Updating instance_info_cache with network_info: [{"id": "02386321-e9cb-45ce-b235-d3c121d3cff1", "address": "fa:16:3e:c5:9d:a4", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02386321-e9", "ovs_interfaceid": "02386321-e9cb-45ce-b235-d3c121d3cff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.518570] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1029.542493] env[70020]: DEBUG oslo_vmware.api [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Task: {'id': task-3618755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144734} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.542731] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.542917] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.543377] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.543605] env[70020]: INFO nova.compute.manager [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1029.543861] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.544061] env[70020]: DEBUG nova.compute.manager [-] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1029.544359] env[70020]: DEBUG nova.network.neutron [-] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.613896] env[70020]: INFO nova.compute.manager [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Took 41.26 seconds to build instance. [ 1029.626295] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Volume attach.
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1029.627039] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721768', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'name': 'volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'serial': '0c646aab-21b0-4b8c-9b2a-36335a4b1275'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1029.627431] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6a5f05-4533-480b-b4fc-8a2593bbee3b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.651082] env[70020]: DEBUG oslo_concurrency.lockutils [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "056141e3-5628-4451-bd25-f4fa15edd11e" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.655049] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74677d28-8d74-448c-a6ba-b7ff21511d91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.701195] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275/volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.703372] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2ad4fe8-af7a-47a3-a6cf-05142cd8a5cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.723744] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1029.723744] env[70020]: value = "task-3618757" [ 1029.723744] env[70020]: _type = "Task" [ 1029.723744] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.736618] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618757, 'name': ReconfigVM_Task} progress is 6%.
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.741161] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618756, 'name': Rename_Task, 'duration_secs': 0.192114} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.741362] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.741604] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8dfc53e-af98-49e1-a7d6-36b10231b5c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.749363] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1029.749363] env[70020]: value = "task-3618758" [ 1029.749363] env[70020]: _type = "Task" [ 1029.749363] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.758171] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618758, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.799233] env[70020]: DEBUG nova.network.neutron [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Successfully created port: 9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.933961] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b9e4b3-cf1e-4747-97c9-53d13f1d20d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.943751] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49f76d8-5867-40bc-8beb-45ecbf066d26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.977769] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c92cf89-1434-459d-9b5c-af6b6041155d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.986735] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519fd03d-8124-4a43-bded-e29b6f879870 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.003208] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.005751] env[70020]: DEBUG nova.compute.provider_tree [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.116859] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2f59e5b5-96fb-40a3-95e2-5109883c01e7 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 42.772s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.118535] env[70020]: DEBUG oslo_concurrency.lockutils [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.466s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.118535] env[70020]: DEBUG nova.compute.manager [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e]
Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.118535] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a141cfbe-7ae8-4044-896f-6e88ce20c3fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.125859] env[70020]: DEBUG nova.compute.manager [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1030.126478] env[70020]: DEBUG nova.objects.instance [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'flavor' on Instance uuid 056141e3-5628-4451-bd25-f4fa15edd11e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.188694] env[70020]: DEBUG nova.compute.manager [req-a7a78365-92e6-4341-80ea-53a458fc88cd req-e7a1cde7-fbe7-4b08-bdd7-d77d896f6ec0 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Received event network-vif-deleted-7648a826-e268-4333-96ce-f336ff254b66 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.188894] env[70020]: INFO nova.compute.manager [req-a7a78365-92e6-4341-80ea-53a458fc88cd req-e7a1cde7-fbe7-4b08-bdd7-d77d896f6ec0 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Neutron deleted interface 7648a826-e268-4333-96ce-f336ff254b66; detaching it from the instance and deleting it from the info cache [ 1030.189080] env[70020]: DEBUG nova.network.neutron [req-a7a78365-92e6-4341-80ea-53a458fc88cd req-e7a1cde7-fbe7-4b08-bdd7-d77d896f6ec0 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.237306] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618757, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.259273] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618758, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.512295] env[70020]: DEBUG nova.scheduler.client.report [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.534084] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1030.559274] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1030.559543] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.559724] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1030.559916] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.560070] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Image pref 0:0:0 {{(pid=70020) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1030.560213] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1030.560411] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1030.560565] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1030.560727] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1030.560885] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1030.561063] env[70020]: DEBUG nova.virt.hardware [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1030.563212] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78bbc59-fb7a-481b-b155-989f80254b63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.571497] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b97b48-265a-41f1-8622-7c9c0366237b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.668253] env[70020]: DEBUG nova.network.neutron [-] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.692924] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b7b5de5-5f3a-4c70-af53-57411150cc68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.703201] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c229e83-b51f-4eb6-bada-2174952e9525 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.739100] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618757, 'name': ReconfigVM_Task, 'duration_secs': 0.554535} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.739100] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275/volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.759033] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df0cda19-cfb0-4259-8ad6-6a848c78b03c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.770043] env[70020]: DEBUG nova.compute.manager [req-a7a78365-92e6-4341-80ea-53a458fc88cd req-e7a1cde7-fbe7-4b08-bdd7-d77d896f6ec0 service nova] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Detach interface failed, port_id=7648a826-e268-4333-96ce-f336ff254b66, reason: Instance 45926a02-d0fe-4274-ba47-b97b3e12e4cd could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1030.779582] env[70020]: DEBUG oslo_vmware.api [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618758, 'name': PowerOnVM_Task, 'duration_secs': 0.562723} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.781072] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1030.781072] env[70020]: INFO nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Took 7.63 seconds to spawn the instance on the hypervisor. [ 1030.781205] env[70020]: DEBUG nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.781508] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1030.781508] env[70020]: value = "task-3618759" [ 1030.781508] env[70020]: _type = "Task" [ 1030.781508] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.782290] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51585468-795f-468d-ac18-05051e239ce6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.798440] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618759, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.017012] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.017012] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1031.018721] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.133s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.018926] env[70020]: DEBUG nova.objects.instance [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lazy-loading 'pci_requests' on Instance uuid 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.134825] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.135737] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95ee9ca5-ed4e-4354-b7c5-454385d313cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.143220] env[70020]: DEBUG oslo_vmware.api [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1031.143220] env[70020]: value = "task-3618760" [ 1031.143220] env[70020]: _type = "Task" [ 1031.143220] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.152341] env[70020]: DEBUG oslo_vmware.api [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.172158] env[70020]: INFO nova.compute.manager [-] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Took 1.63 seconds to deallocate network for instance. [ 1031.295683] env[70020]: DEBUG oslo_vmware.api [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618759, 'name': ReconfigVM_Task, 'duration_secs': 0.169608} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.296308] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721768', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'name': 'volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'serial': '0c646aab-21b0-4b8c-9b2a-36335a4b1275'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1031.307908] env[70020]: INFO nova.compute.manager [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Took 42.55 seconds to build instance. [ 1031.526324] env[70020]: DEBUG nova.objects.instance [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lazy-loading 'numa_topology' on Instance uuid 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.529733] env[70020]: DEBUG nova.compute.utils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1031.533214] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1031.533214] env[70020]: DEBUG nova.network.neutron [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1031.559094] env[70020]: DEBUG nova.network.neutron [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Successfully updated port: 9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.571618] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.571967] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03eacae1-59e7-42f3-8264-f3da311cc62c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.584272] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1031.584272] env[70020]: value = "task-3618761" [ 1031.584272] env[70020]: _type = "Task" [ 1031.584272] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.592274] env[70020]: DEBUG nova.policy [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afb49d648a70426fa7c39789e51ab625', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7aae0b70f9d465ebcb9defe385fa434', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1031.598749] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618761, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.658369] env[70020]: DEBUG oslo_vmware.api [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618760, 'name': PowerOffVM_Task, 'duration_secs': 0.210487} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.658825] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.659239] env[70020]: DEBUG nova.compute.manager [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.660308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d921559-4360-4c38-ae7e-c4ae1d42eb94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.679338] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.812223] env[70020]: DEBUG oslo_concurrency.lockutils [None req-11ad4b5a-7894-48b6-959d-7fe30ab59c46 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "0453722d-258f-49e3-b61e-f1081eb465c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.067s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.893980] env[70020]: DEBUG nova.network.neutron [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Successfully created port: 36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.033303] env[70020]: INFO nova.compute.claims [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.038222] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1032.063249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.063249] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.063249] env[70020]: DEBUG nova.network.neutron [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.094820] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618761, 'name': PowerOffVM_Task, 'duration_secs': 0.342968} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.095124] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.096228] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edf2941-ccd4-49d9-ae38-b6d3a224d77c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.120714] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2333c3-a486-40e1-a73c-8c4c7c15b24e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.135197] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.135438] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.162588] env[70020]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.162756] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39eee1e3-7a53-45c5-91e3-29211670adfd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.176318] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1032.176318] env[70020]: value = "task-3618762" [ 1032.176318] env[70020]: _type = "Task" [ 1032.176318] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.176466] env[70020]: DEBUG oslo_concurrency.lockutils [None req-17d0bab6-1f68-4f55-bf50-b736643a6b67 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.189735] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1032.189957] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1032.190254] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.190410] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.190597] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.191093] env[70020]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a04a63e-616d-4252-870d-27364c7806b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.203378] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.203378] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1032.204119] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9a7a125-7f3f-441d-b939-1a331eb4e5ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.209601] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1032.209601] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52020b13-42c2-c507-5639-27671ae6e0d6" [ 1032.209601] env[70020]: _type = "Task" [ 1032.209601] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.217155] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52020b13-42c2-c507-5639-27671ae6e0d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.348039] env[70020]: DEBUG nova.compute.manager [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Received event network-vif-plugged-9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.348039] env[70020]: DEBUG oslo_concurrency.lockutils [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] Acquiring lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.348039] env[70020]: DEBUG oslo_concurrency.lockutils [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.348189] env[70020]: DEBUG oslo_concurrency.lockutils [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.348833] env[70020]: DEBUG nova.compute.manager [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] No waiting events found dispatching network-vif-plugged-9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.348833] env[70020]: WARNING nova.compute.manager [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Received unexpected event network-vif-plugged-9e9d26c4-eeea-4e28-84a1-156d81e4466a for instance with vm_state building and task_state spawning. [ 1032.348833] env[70020]: DEBUG nova.compute.manager [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Received event network-changed-9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.348833] env[70020]: DEBUG nova.compute.manager [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Refreshing instance network info cache due to event network-changed-9e9d26c4-eeea-4e28-84a1-156d81e4466a. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1032.348997] env[70020]: DEBUG oslo_concurrency.lockutils [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] Acquiring lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.362450] env[70020]: DEBUG nova.objects.instance [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid da07cb36-244f-4f48-a5b6-8d00324c1edf {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.598916] env[70020]: DEBUG nova.network.neutron [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.641296] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1032.719632] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52020b13-42c2-c507-5639-27671ae6e0d6, 'name': SearchDatastore_Task, 'duration_secs': 0.014754} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.720481] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be90b10f-6c45-4063-bedf-8e39bf09d2fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.725643] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1032.725643] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f403f0-6eb6-cd08-1f49-0344db603085" [ 1032.725643] env[70020]: _type = "Task" [ 1032.725643] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.733065] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f403f0-6eb6-cd08-1f49-0344db603085, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.734529] env[70020]: DEBUG nova.network.neutron [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updating instance_info_cache with network_info: [{"id": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "address": "fa:16:3e:17:d6:22", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9d26c4-ee", "ovs_interfaceid": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.867893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1ea663fa-61ae-4dc1-8f05-36a08638c447 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.919s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.910741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "9d1568bf-4027-4d4c-b089-276006eee715" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.910741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9d1568bf-4027-4d4c-b089-276006eee715" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.912198] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "056141e3-5628-4451-bd25-f4fa15edd11e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.912405] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.912597] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "056141e3-5628-4451-bd25-f4fa15edd11e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.912779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.912944] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.915516] env[70020]: INFO nova.compute.manager [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Terminating instance [ 1033.050503] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1033.076510] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.076835] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.076983] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.077204] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.077362] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.077532] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.077803] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.077999] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.078165] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.078330] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.078526] env[70020]: DEBUG nova.virt.hardware [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.079451] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321d9d15-6667-4f26-9712-d2418f31cfe3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.090671] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686ae3c1-4411-4a05-9255-b2fd412497e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.157580] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.236686] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.236987] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Instance network_info: |[{"id": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "address": "fa:16:3e:17:d6:22", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9d26c4-ee", "ovs_interfaceid": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1033.238043] env[70020]: DEBUG oslo_concurrency.lockutils [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] Acquired lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.238043] env[70020]: DEBUG nova.network.neutron [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Refreshing network info cache for port 9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.238767] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:d6:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e9d26c4-eeea-4e28-84a1-156d81e4466a', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.246654] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating folder: Project (e7aae0b70f9d465ebcb9defe385fa434). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1033.253137] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c80b9830-0fcc-4bc4-b1f8-8b251005ea45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.254894] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f403f0-6eb6-cd08-1f49-0344db603085, 'name': SearchDatastore_Task, 'duration_secs': 0.009563} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.256485] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.256485] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. {{(pid=70020) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1033.256850] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2b3d283-5b7d-4dae-98e4-1416ed7a9d1c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.265082] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1033.265082] env[70020]: value = "task-3618764" [ 1033.265082] env[70020]: _type = "Task" [ 1033.265082] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.271632] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created folder: Project (e7aae0b70f9d465ebcb9defe385fa434) in parent group-v721521. [ 1033.271893] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating folder: Instances. Parent ref: group-v721770. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1033.272578] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfab228b-c28e-4da6-950d-e55e0feabb23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.277328] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618764, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.285706] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created folder: Instances in parent group-v721770. 
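The records above follow the task pattern that recurs throughout this excerpt: each vCenter call (Folder.CreateFolder, HostDatastoreBrowser.SearchDatastore_Task, VirtualDiskManager.CopyVirtualDisk_Task) hands back a task handle, and the compute service then blocks in wait_for_task, emitting a "Waiting for the task" record, periodic "progress is N%." polls, and a final "completed successfully" record carrying duration_secs. The sketch below is only an illustration of that poll-until-done control flow, not the oslo_vmware implementation referenced in the paths above; the fetch_task_info callable and the state strings are assumptions made for this example.

    import logging
    import time

    LOG = logging.getLogger(__name__)

    def wait_for_task(task_id, fetch_task_info, poll_interval=0.5):
        # Simplified stand-in for the poll loop behind the "Waiting for the task" /
        # "progress is N%." / "completed successfully" records above. fetch_task_info
        # is a caller-supplied function returning a dict such as
        # {"state": "running", "progress": 10} -- an assumption for this example.
        LOG.debug("Waiting for the task: %s to complete.", task_id)
        while True:
            info = fetch_task_info(task_id)
            if info["state"] == "running":
                LOG.debug("Task: %s progress is %d%%.", task_id, info.get("progress", 0))
                time.sleep(poll_interval)
                continue
            if info["state"] == "success":
                LOG.debug("Task: %s completed successfully.", task_id)
                return info
            raise RuntimeError("Task %s failed: %s" % (task_id, info.get("error")))

In the real service the poll interval and retry behaviour are configurable and the task info comes from the vSphere SOAP API; the sketch keeps only the logging shape so the surrounding records are easier to read.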
[ 1033.285941] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.286148] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.286352] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5a45c16-d3bd-4be9-93e4-05ed3191c4ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.310885] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.310885] env[70020]: value = "task-3618766" [ 1033.310885] env[70020]: _type = "Task" [ 1033.310885] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.319110] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618766, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.382570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc168926-d1e5-4436-a1e2-e801bab203e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.391854] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf46018-4026-4be8-b007-1c44a2b6e511 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.423912] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1033.427895] env[70020]: DEBUG nova.compute.manager [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1033.427895] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.428201] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182b276c-b6f9-487d-b86a-6e0be511854b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.435745] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9508a8b4-7309-487e-aa49-ecd5f66aba04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.452379] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7076359-9949-4dd2-8256-8439225eee7e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.456655] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.457506] env[70020]: DEBUG nova.network.neutron [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Successfully updated port: 36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1033.461049] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d327983f-332b-40fd-a1d4-238815c84f2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.474682] env[70020]: DEBUG nova.compute.provider_tree [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.537701] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1033.538055] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1033.538365] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 
tempest-DeleteServersTestJSON-1971284771-project-member] Deleting the datastore file [datastore2] 056141e3-5628-4451-bd25-f4fa15edd11e {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1033.538712] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e0142fa-139d-434a-8776-a13d197c84e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.546850] env[70020]: DEBUG oslo_vmware.api [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1033.546850] env[70020]: value = "task-3618768" [ 1033.546850] env[70020]: _type = "Task" [ 1033.546850] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.555938] env[70020]: DEBUG oslo_vmware.api [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618768, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.775627] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499053} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.775954] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. 
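Almost every operation in this excerpt is also bracketed by lock accounting from oslo_concurrency (lockutils.py lines 405, 410 and 424): "Acquiring lock ...", "acquired ... :: waited N.NNNs", and '"released" ... :: held N.NNNs', for example the "compute_resources" claim held for 2.510s and the instance build lock held for 44.067s earlier in this section. Below is a minimal sketch of a decorator that produces the same acquire/wait/hold bookkeeping with a plain threading.Lock; it is an illustration of the pattern, not oslo_concurrency.lockutils itself, and the in-process _locks registry is an assumption for this example.

    import functools
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _locks = {}                       # name -> threading.Lock; assumed in-process registry
    _registry_guard = threading.Lock()

    def synchronized(name):
        # Simplified stand-in for the decorator behind the "Acquiring lock" /
        # "acquired :: waited" / '"released" :: held' records above.
        def decorator(func):
            @functools.wraps(func)
            def inner(*args, **kwargs):
                with _registry_guard:
                    lock = _locks.setdefault(name, threading.Lock())
                target = func.__qualname__
                LOG.debug('Acquiring lock "%s" by "%s"', name, target)
                start = time.monotonic()
                lock.acquire()
                LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                          name, target, time.monotonic() - start)
                held_from = time.monotonic()
                try:
                    return func(*args, **kwargs)
                finally:
                    lock.release()
                    LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                              name, target, time.monotonic() - held_from)
            return inner
        return decorator

Applying such a decorator (e.g. @synchronized("compute_resources")) to a claim-style method would yield records of the same shape as those seen here; the waited/held figures are what make contention visible, such as the 29.133s wait on "compute_resources" logged at 1031.018721 above.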
[ 1033.776752] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c04c269-34cb-4fc2-857b-87494f14ad54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.805645] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.805981] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55de0a5c-6ac3-43a7-8431-96b3bd96e6df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.821448] env[70020]: DEBUG oslo_concurrency.lockutils [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.821678] env[70020]: DEBUG oslo_concurrency.lockutils [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.830804] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1033.830804] env[70020]: value = "task-3618769" [ 1033.830804] env[70020]: _type = "Task" [ 1033.830804] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.833816] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618766, 'name': CreateVM_Task, 'duration_secs': 0.471966} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.837009] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.837849] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.838017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.838318] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.838828] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-321b6cb2-ec82-409d-97d3-bbbdfadb9f9c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.845877] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618769, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.848395] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1033.848395] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b324e2-6bc4-9d7c-1e0f-f13bcbab1f8a" [ 1033.848395] env[70020]: _type = "Task" [ 1033.848395] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.857035] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b324e2-6bc4-9d7c-1e0f-f13bcbab1f8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.945696] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.963795] env[70020]: DEBUG nova.network.neutron [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updated VIF entry in instance network info cache for port 9e9d26c4-eeea-4e28-84a1-156d81e4466a. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.964152] env[70020]: DEBUG nova.network.neutron [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updating instance_info_cache with network_info: [{"id": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "address": "fa:16:3e:17:d6:22", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9d26c4-ee", "ovs_interfaceid": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.965595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.965717] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.965854] env[70020]: DEBUG nova.network.neutron [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Building network info cache for instance {{(pid=70020) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.978201] env[70020]: DEBUG nova.scheduler.client.report [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.056573] env[70020]: DEBUG oslo_vmware.api [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276409} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.056840] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.057060] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.057243] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.057413] env[70020]: INFO nova.compute.manager [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1034.057692] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.057922] env[70020]: DEBUG nova.compute.manager [-] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.058033] env[70020]: DEBUG nova.network.neutron [-] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.328244] env[70020]: INFO nova.compute.manager [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Detaching volume 53da2c84-5ce4-4b98-93c7-9fe8956ff162 [ 1034.345395] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.359130] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b324e2-6bc4-9d7c-1e0f-f13bcbab1f8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010544} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.359130] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.359130] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.359130] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.359130] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.359326] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 
tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.359425] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b32b5215-7ee4-41f0-a125-800a8202f260 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.365537] env[70020]: INFO nova.virt.block_device [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Attempting to driver detach volume 53da2c84-5ce4-4b98-93c7-9fe8956ff162 from mountpoint /dev/sdb [ 1034.365817] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1034.366018] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721763', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'name': 'volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'serial': '53da2c84-5ce4-4b98-93c7-9fe8956ff162'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1034.367326] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b418df75-bc99-4c39-9f04-a07e35a1bcc8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.370614] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.370789] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.371777] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-951018eb-16bb-486a-ad58-71f5edd7f36b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.395101] env[70020]: DEBUG nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Received event network-vif-plugged-36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.395301] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] Acquiring lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.395510] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.395676] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.395859] env[70020]: DEBUG nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] No waiting events found dispatching network-vif-plugged-36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.396034] env[70020]: WARNING nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Received unexpected event network-vif-plugged-36d80bdd-ca39-476a-91b5-601ea7cb1316 for instance with vm_state building and task_state spawning. [ 1034.396198] env[70020]: DEBUG nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Received event network-changed-36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.396350] env[70020]: DEBUG nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Refreshing instance network info cache due to event network-changed-36d80bdd-ca39-476a-91b5-601ea7cb1316. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1034.396515] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] Acquiring lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.397810] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f02fe32-d406-4697-aa78-fcaa6c44de98 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.401501] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1034.401501] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e14eb8-46b0-31cd-0a5b-3e1dae755c96" [ 1034.401501] env[70020]: _type = "Task" [ 1034.401501] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.407027] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e281a9a9-c236-4bae-919c-e4f338b30c0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.412249] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e14eb8-46b0-31cd-0a5b-3e1dae755c96, 'name': SearchDatastore_Task, 'duration_secs': 0.008605} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.413269] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f49d0cf5-60b7-434f-b5ac-370962d98cd2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.433904] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2845bcf-4c04-4006-b5f1-4aefc7e303de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.437287] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1034.437287] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52252cb8-0f62-9361-9723-d2c427d2f31d" [ 1034.437287] env[70020]: _type = "Task" [ 1034.437287] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.452506] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] The volume has not been displaced from its original location: [datastore1] volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162/volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1034.457597] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1034.458307] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77e46329-8503-4a96-ac55-b5ab967303fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.474606] env[70020]: DEBUG oslo_concurrency.lockutils [req-348dfbb5-a380-4338-b933-e1fd90e185f6 req-46ad0200-7a80-4e4d-b340-df5d92373aac service nova] Releasing lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.476736] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52252cb8-0f62-9361-9723-d2c427d2f31d, 'name': SearchDatastore_Task, 'duration_secs': 0.008519} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.477403] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.477734] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.478277] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2198a57-f606-4e99-8748-171622eb3f87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.481506] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1034.481506] env[70020]: value = "task-3618770" [ 1034.481506] env[70020]: _type = "Task" [ 1034.481506] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.487255] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.469s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.489586] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1034.489586] env[70020]: value = "task-3618771" [ 1034.489586] env[70020]: _type = "Task" [ 1034.489586] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.489794] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.780s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.489950] env[70020]: DEBUG nova.objects.instance [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1034.500188] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618770, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.505235] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618771, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.530576] env[70020]: DEBUG nova.network.neutron [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.541015] env[70020]: INFO nova.network.neutron [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating port 52cf3b73-bbee-4e96-91f2-a1caa2041501 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1034.758014] env[70020]: DEBUG nova.network.neutron [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Updating instance_info_cache with network_info: [{"id": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "address": "fa:16:3e:ff:ad:83", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d80bdd-ca", "ovs_interfaceid": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.792850] env[70020]: DEBUG nova.network.neutron [-] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.848671] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618769, 'name': ReconfigVM_Task, 'duration_secs': 0.884137} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.848975] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Reconfigured VM instance instance-00000059 to attach disk [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1034.850801] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25986b73-6328-409b-ab78-6324efd2a95b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.883035] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51d92498-2ccd-41e4-bdda-f6d097516850 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.900472] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1034.900472] env[70020]: value = "task-3618772" [ 1034.900472] env[70020]: _type = "Task" [ 1034.900472] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.908519] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618772, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.991966] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618770, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.004392] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618771, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469212} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.005547] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.005824] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.006110] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f373b338-0585-43f7-af04-e3e1b7db690a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.011982] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1035.011982] env[70020]: value = "task-3618773" [ 1035.011982] env[70020]: _type = "Task" [ 1035.011982] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.019242] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618773, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.260751] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.261130] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Instance network_info: |[{"id": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "address": "fa:16:3e:ff:ad:83", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d80bdd-ca", "ovs_interfaceid": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1035.261439] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] Acquired lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.261618] env[70020]: DEBUG nova.network.neutron [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Refreshing network info cache for port 36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.262870] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:ad:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36d80bdd-ca39-476a-91b5-601ea7cb1316', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.270246] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d938b12-380a-4861-a71b-7fd579061a68 
tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.271162] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.271386] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cfed916-e5fa-4060-8592-cf4290c2d9a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.293698] env[70020]: INFO nova.compute.manager [-] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Took 1.24 seconds to deallocate network for instance. [ 1035.293991] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.293991] env[70020]: value = "task-3618774" [ 1035.293991] env[70020]: _type = "Task" [ 1035.293991] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.308725] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618774, 'name': CreateVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.409929] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.492588] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618770, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.500792] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73c5963a-8a7d-4a56-9648-6c63fb0d5dcc tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.501853] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.745s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.503315] env[70020]: INFO nova.compute.claims [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1035.522346] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067873} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.522648] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.523535] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d43c0e6-eb2f-4021-8482-cb0b65023e58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.547224] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.548287] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27a117f2-537e-4cc9-b53e-5ce0fc9144a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.568737] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1035.568737] env[70020]: value = "task-3618775" [ 1035.568737] env[70020]: _type = "Task" [ 1035.568737] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.577286] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618775, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.804991] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.811214] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618774, 'name': CreateVM_Task, 'duration_secs': 0.389199} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.811444] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.812149] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.812319] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.812684] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1035.813021] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592ea06f-f215-4bd3-8365-035ca8c6a61f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.818662] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1035.818662] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528f386d-ce08-f82d-d8d5-e953537660aa" [ 1035.818662] env[70020]: _type = "Task" [ 1035.818662] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.828096] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528f386d-ce08-f82d-d8d5-e953537660aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.911699] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.993434] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618770, 'name': ReconfigVM_Task, 'duration_secs': 1.396177} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.993719] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1035.998630] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8db03d13-3e67-4d58-a55d-190a6e9ffe2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.018728] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1036.018728] env[70020]: value = "task-3618776" [ 1036.018728] env[70020]: _type = "Task" [ 1036.018728] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.026887] env[70020]: DEBUG nova.network.neutron [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Updated VIF entry in instance network info cache for port 36d80bdd-ca39-476a-91b5-601ea7cb1316. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.027301] env[70020]: DEBUG nova.network.neutron [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Updating instance_info_cache with network_info: [{"id": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "address": "fa:16:3e:ff:ad:83", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d80bdd-ca", "ovs_interfaceid": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.040025] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618776, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.079089] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618775, 'name': ReconfigVM_Task, 'duration_secs': 0.302952} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.079382] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Reconfigured VM instance instance-0000005c to attach disk [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.080087] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ff8e570-6c3e-4fb8-b6aa-3149c5138f11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.086829] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1036.086829] env[70020]: value = "task-3618777" [ 1036.086829] env[70020]: _type = "Task" [ 1036.086829] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.094477] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618777, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.190042] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.190085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.190266] env[70020]: DEBUG nova.network.neutron [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1036.329719] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528f386d-ce08-f82d-d8d5-e953537660aa, 'name': SearchDatastore_Task, 'duration_secs': 0.012253} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.330087] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.330363] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.330634] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.330835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.331065] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.331356] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fd851f9-caa0-4c8a-a06c-20d7c67c4b8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.339182] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.339397] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.340102] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d79f817a-9810-4c3f-b3bc-f4515dfb9d48 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.344908] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1036.344908] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5237a1d6-b496-a83b-8214-44ede8e97359" [ 1036.344908] env[70020]: _type = "Task" [ 1036.344908] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.351849] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5237a1d6-b496-a83b-8214-44ede8e97359, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.400797] env[70020]: DEBUG nova.compute.manager [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-vif-plugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1036.401023] env[70020]: DEBUG oslo_concurrency.lockutils [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.401242] env[70020]: DEBUG oslo_concurrency.lockutils [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.401406] env[70020]: DEBUG oslo_concurrency.lockutils [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.401570] env[70020]: DEBUG nova.compute.manager [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] No waiting events found dispatching network-vif-plugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1036.401730] env[70020]: WARNING nova.compute.manager [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received unexpected event 
network-vif-plugged-52cf3b73-bbee-4e96-91f2-a1caa2041501 for instance with vm_state shelved_offloaded and task_state spawning. [ 1036.401885] env[70020]: DEBUG nova.compute.manager [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-changed-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1036.402048] env[70020]: DEBUG nova.compute.manager [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Refreshing instance network info cache due to event network-changed-52cf3b73-bbee-4e96-91f2-a1caa2041501. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1036.402214] env[70020]: DEBUG oslo_concurrency.lockutils [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.411395] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618772, 'name': ReconfigVM_Task, 'duration_secs': 1.227179} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.411632] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1036.411865] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8a2202d-b447-429a-82cf-0cc214be6783 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.417874] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1036.417874] env[70020]: value = "task-3618778" [ 1036.417874] env[70020]: _type = "Task" [ 1036.417874] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.425405] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618778, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.529613] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] Releasing lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.529885] env[70020]: DEBUG nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Received event network-vif-deleted-f3bb6c02-a473-447c-a316-a09dfd62af88 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1036.530106] env[70020]: INFO nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Neutron deleted interface f3bb6c02-a473-447c-a316-a09dfd62af88; detaching it from the instance and deleting it from the info cache [ 1036.530289] env[70020]: DEBUG nova.network.neutron [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.531458] env[70020]: DEBUG oslo_vmware.api [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618776, 'name': ReconfigVM_Task, 'duration_secs': 0.162227} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.531739] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721763', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'name': 'volume-53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '53da2c84-5ce4-4b98-93c7-9fe8956ff162', 'serial': '53da2c84-5ce4-4b98-93c7-9fe8956ff162'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1036.600163] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618777, 'name': Rename_Task, 'duration_secs': 0.143239} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.600163] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1036.600254] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b42a1d20-ca8a-480c-b8a3-3aa6cefc9c36 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.607109] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1036.607109] env[70020]: value = "task-3618779" [ 1036.607109] env[70020]: _type = "Task" [ 1036.607109] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.615766] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.821396] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a24a386-13cb-4d2b-a1f9-b6d28650793e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.828834] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cc7bec-f93b-4268-a6af-2370b498ffec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.865935] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495795c8-d457-472e-8877-be320d9f12bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.877339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ad2495-1917-4296-8e96-e9d9f2887cdd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.881173] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5237a1d6-b496-a83b-8214-44ede8e97359, 'name': SearchDatastore_Task, 'duration_secs': 0.007966} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.884506] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d79399dd-c03e-479a-8568-86120412b0f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.896015] env[70020]: DEBUG nova.compute.provider_tree [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.901214] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1036.901214] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dcaa54-220b-27d5-42b5-9e27d10ccbac" [ 1036.901214] env[70020]: _type = "Task" [ 1036.901214] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.910035] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dcaa54-220b-27d5-42b5-9e27d10ccbac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.919314] env[70020]: DEBUG nova.network.neutron [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.929607] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618778, 
'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.035749] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85e30da2-97c7-4f1f-b03d-634860048b08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.045640] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1968796-3f06-4061-ac86-0de692ddf88a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.075934] env[70020]: DEBUG nova.compute.manager [req-d4b859cc-1385-4ac8-aaf9-98bef1a8491d req-27406109-4741-40a4-9b7d-8c60103d83c4 service nova] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Detach interface failed, port_id=f3bb6c02-a473-447c-a316-a09dfd62af88, reason: Instance 056141e3-5628-4451-bd25-f4fa15edd11e could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1037.077503] env[70020]: DEBUG nova.objects.instance [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid da07cb36-244f-4f48-a5b6-8d00324c1edf {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.118836] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618779, 'name': PowerOnVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.400320] env[70020]: DEBUG nova.scheduler.client.report [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.413703] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dcaa54-220b-27d5-42b5-9e27d10ccbac, 'name': SearchDatastore_Task, 'duration_secs': 0.011465} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.414036] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.414350] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/5b69d3b2-c236-45f9-b35b-a9992b9c1c79.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.415683] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41149873-78bf-465c-a8c9-e61d7e765dcd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.421792] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1037.421792] env[70020]: value = "task-3618780" [ 1037.421792] env[70020]: _type = "Task" [ 1037.421792] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.426253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.430820] env[70020]: DEBUG oslo_concurrency.lockutils [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.431058] env[70020]: DEBUG nova.network.neutron [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Refreshing network info cache for port 52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1037.438298] env[70020]: DEBUG oslo_vmware.api [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618778, 'name': PowerOnVM_Task, 'duration_secs': 0.567936} completed successfully. 
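The Acquiring/Acquired/Releasing lock lines bracketing the cached image vmdk (here "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk") come from oslo.concurrency's lockutils serializing work on the image cache. A minimal illustrative sketch of that pattern, not Nova's actual code; only the public `lockutils.lock()` context manager is the real API, the lock name and copy step are stand-ins taken from the log:

```python
# Illustrative sketch of the lock-around-image-cache pattern seen above.
from oslo_concurrency import lockutils

CACHED_VMDK = ("[datastore2] devstack-image-cache_base/"
               "c9cd83bf-fd12-4173-a067-f57d38f23556/"
               "c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk")


def fetch_image_if_missing(copy_virtual_disk):
    """Serialize access to the cached image, as the DEBUG lines above show."""
    # Entering the context manager emits the "Acquiring lock ..." and
    # "Acquired lock ..." DEBUG lines from lockutils.
    with lockutils.lock(CACHED_VMDK):
        # Inside the critical section the driver searches the datastore and,
        # if needed, copies the cached vmdk into the instance folder
        # (stubbed here by the callable passed in).
        copy_virtual_disk()
    # Leaving the context manager emits the "Releasing lock ..." line.


if __name__ == "__main__":
    fetch_image_if_missing(lambda: print("copy cached vmdk -> instance dir"))
```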
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.441374] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.443263] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618780, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.444761] env[70020]: DEBUG nova.compute.manager [None req-0adb288d-361b-4fff-b0ff-7811f3e0efba tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.445767] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37f2499-0cda-45ac-8524-c80631cbf3cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.464206] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9c6df10b58b9402ef2850eb963ff26d6',container_format='bare',created_at=2025-04-25T23:06:55Z,direct_url=,disk_format='vmdk',id=ce5b528a-e2a0-4108-a61b-8585c8e0dc08,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-210630680-shelved',owner='a63e8bb4fcd844f69aaeade95326a91b',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-04-25T23:07:09Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1037.464408] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.464630] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1037.464828] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.464971] env[70020]: DEBUG nova.virt.hardware [None 
req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1037.465131] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1037.465333] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1037.465487] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1037.465673] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1037.465846] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1037.466085] env[70020]: DEBUG nova.virt.hardware [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1037.467173] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b87e42-918b-4517-8226-7f39e39b0570 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.475963] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f65d43-65e0-4977-abfd-1a98ce917a34 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.490790] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c0:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f847601f-7479-48eb-842f-41f94eea8537', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52cf3b73-bbee-4e96-91f2-a1caa2041501', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.499347] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.499958] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1037.500277] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8766b973-47bb-4fb2-b06c-a1a4adf5e719 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.522236] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1037.522236] env[70020]: value = "task-3618781" [ 1037.522236] env[70020]: _type = "Task" [ 1037.522236] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.529207] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618781, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.620362] env[70020]: DEBUG oslo_vmware.api [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618779, 'name': PowerOnVM_Task, 'duration_secs': 0.729287} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.620926] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.621168] env[70020]: INFO nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Took 7.09 seconds to spawn the instance on the hypervisor. 
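The long runs of "Task: {...} progress is N%." followed by "completed successfully" are produced by oslo.vmware polling a vCenter task until it reaches a terminal state (the wait_for_task / _poll_task paths in the markers above). A rough, self-contained sketch of such a poll loop, with a hypothetical `fetch_task_info` callable standing in for the real property-collector read of the TaskInfo object:

```python
# Generic poll-until-done loop in the spirit of oslo.vmware's task waiting.
# fetch_task_info is a hypothetical callable returning (state, progress, error).
import time


def wait_for_task(fetch_task_info, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_info()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed: %s" % error)
        # Mirrors the "... progress is N%." DEBUG lines in the log.
        print("progress is %s%%" % (progress or 0))
        time.sleep(interval)
    raise TimeoutError("task did not complete within %ss" % timeout)


if __name__ == "__main__":
    # Toy usage: a task that reports 0%, then 66%, then success.
    states = iter([("running", 0, None),
                   ("running", 66, None),
                   ("success", 100, None)])
    wait_for_task(lambda: next(states), interval=0.01)
```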
[ 1037.621331] env[70020]: DEBUG nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.622211] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0d17ae-fa1e-4fa3-ac14-d193fc899de3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.908981] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.909705] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1037.913253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.457s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.913531] env[70020]: DEBUG nova.objects.instance [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'resources' on Instance uuid 40fa0339-c221-4841-9444-dc957a95cf3b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.938103] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618780, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.032378] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618781, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.086280] env[70020]: DEBUG oslo_concurrency.lockutils [None req-046aa7dc-8960-4a47-89e0-6ee3279c5123 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.264s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.121570] env[70020]: DEBUG oslo_concurrency.lockutils [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.121840] env[70020]: DEBUG oslo_concurrency.lockutils [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.142959] env[70020]: INFO nova.compute.manager [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Took 39.28 seconds to build instance. [ 1038.161558] env[70020]: DEBUG nova.network.neutron [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updated VIF entry in instance network info cache for port 52cf3b73-bbee-4e96-91f2-a1caa2041501. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1038.161939] env[70020]: DEBUG nova.network.neutron [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.418123] env[70020]: DEBUG nova.compute.utils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1038.422059] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1038.422249] env[70020]: DEBUG nova.network.neutron [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1038.436923] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618780, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547808} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.438026] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/5b69d3b2-c236-45f9-b35b-a9992b9c1c79.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.438026] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.438026] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed66716e-d110-441d-99bf-b9cb80b8db5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.445753] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1038.445753] env[70020]: value = "task-3618782" [ 1038.445753] env[70020]: _type = "Task" [ 1038.445753] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.455092] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.465987] env[70020]: DEBUG nova.policy [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1374458c1943470eba7e774715ba1ca9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3f6d704dd464768953c41d34d34d944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1038.535835] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618781, 'name': CreateVM_Task, 'duration_secs': 0.637613} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.538032] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1038.538931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.539044] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.539409] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1038.539668] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60eee70c-5354-4327-b9c4-6b0b87e059ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.544263] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1038.544263] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5223f84d-2be7-ada4-be84-d1339331a04d" [ 1038.544263] env[70020]: _type = "Task" [ 1038.544263] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.554563] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5223f84d-2be7-ada4-be84-d1339331a04d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.624451] env[70020]: INFO nova.compute.manager [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Detaching volume 0c646aab-21b0-4b8c-9b2a-36335a4b1275 [ 1038.645384] env[70020]: DEBUG oslo_concurrency.lockutils [None req-46594279-c9cd-43fa-8048-6df6f9496b25 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.793s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.665113] env[70020]: DEBUG oslo_concurrency.lockutils [req-797cde98-d262-46b0-b84b-f948c5d73083 req-43fdbc59-b46b-431c-ab9e-95192f2882eb service nova] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.666427] env[70020]: INFO nova.virt.block_device [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Attempting to driver detach volume 0c646aab-21b0-4b8c-9b2a-36335a4b1275 from mountpoint /dev/sdc [ 1038.666735] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1038.666971] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721768', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'name': 'volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'serial': '0c646aab-21b0-4b8c-9b2a-36335a4b1275'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1038.667905] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3455f377-dc18-4afd-b9ba-0e4e6b94651d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.696298] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1064201b-9f84-4bd7-87bf-7ddb462360d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.707302] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1103baf6-a048-49c5-99cf-be5ae9a73d0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.737314] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c6199d-6c28-4cd0-9526-e0563512bc8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.740539] env[70020]: DEBUG nova.network.neutron [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Successfully created port: 12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.756062] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] The volume has not been displaced from its original location: [datastore2] volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275/volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1038.761273] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfiguring VM instance instance-00000052 to detach disk 2002 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1038.764243] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c1aad3d-1b10-46fe-954b-55100f2352f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.784485] env[70020]: DEBUG oslo_vmware.api [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1038.784485] env[70020]: value = "task-3618783" [ 1038.784485] env[70020]: _type = "Task" [ 1038.784485] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.795287] env[70020]: DEBUG oslo_vmware.api [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618783, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.800967] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f320d4a-d2b7-4a47-bfe0-70e62cd0de75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.808096] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ef7159-30a6-4a6a-885e-f56acccfd881 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.842572] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa73bda3-3af6-400c-8c47-50cc9110de3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.845493] env[70020]: INFO nova.compute.manager [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Unrescuing [ 1038.845737] env[70020]: DEBUG oslo_concurrency.lockutils [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.845950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquired lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.846130] env[70020]: DEBUG nova.network.neutron [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 
tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1038.853547] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba7054-a197-403a-820e-ce545f2bdccc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.869996] env[70020]: DEBUG nova.compute.provider_tree [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1038.922610] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1038.956739] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06917} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.957046] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.958308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d97b0a-8698-4fe4-a6e9-2ce68eca6617 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.987595] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/5b69d3b2-c236-45f9-b35b-a9992b9c1c79.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.988324] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e74129a-980a-49bb-adf0-542ba574159e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.010447] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1039.010447] env[70020]: value = "task-3618784" [ 1039.010447] env[70020]: _type = "Task" [ 1039.010447] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.018791] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618784, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.057237] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.057594] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Processing image ce5b528a-e2a0-4108-a61b-8585c8e0dc08 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.057933] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.058152] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.058437] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.058728] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b3317b6-433e-4027-b9a2-25dfa9550aeb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.068595] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.068887] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1039.069852] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c8295c6-0493-476a-a4ed-e2720aa9af87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.076529] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1039.076529] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522235b5-dbda-f78b-fd5c-24193087fc43" [ 1039.076529] env[70020]: _type = "Task" [ 1039.076529] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.088284] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522235b5-dbda-f78b-fd5c-24193087fc43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.294439] env[70020]: DEBUG oslo_vmware.api [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618783, 'name': ReconfigVM_Task, 'duration_secs': 0.390913} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.294833] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Reconfigured VM instance instance-00000052 to detach disk 2002 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1039.299753] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caff411c-3841-4eab-bd05-a48d6114be3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.315182] env[70020]: DEBUG oslo_vmware.api [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1039.315182] env[70020]: value = "task-3618785" [ 1039.315182] env[70020]: _type = "Task" [ 1039.315182] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.323672] env[70020]: DEBUG oslo_vmware.api [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618785, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.393432] env[70020]: ERROR nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [req-5a26aeb1-5729-4821-8c4b-4b836d266f3d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5a26aeb1-5729-4821-8c4b-4b836d266f3d"}]} [ 1039.410105] env[70020]: DEBUG nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1039.427020] env[70020]: DEBUG nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1039.427020] env[70020]: DEBUG nova.compute.provider_tree [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1039.450192] env[70020]: DEBUG nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1039.486253] env[70020]: DEBUG nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 
tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1039.526611] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618784, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.550889] env[70020]: DEBUG nova.network.neutron [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Updating instance_info_cache with network_info: [{"id": "02386321-e9cb-45ce-b235-d3c121d3cff1", "address": "fa:16:3e:c5:9d:a4", "network": {"id": "a4584b23-3c15-4ae9-b89e-0a0e14eeccb8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1275535043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6c8373e835ad4420890442390872c6fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02386321-e9", "ovs_interfaceid": "02386321-e9cb-45ce-b235-d3c121d3cff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.590110] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1039.590382] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Fetch image to [datastore2] OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d/OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1039.590564] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Downloading stream optimized image ce5b528a-e2a0-4108-a61b-8585c8e0dc08 to [datastore2] 
OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d/OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d.vmdk on the data store datastore2 as vApp {{(pid=70020) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1039.590712] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Downloading image file data ce5b528a-e2a0-4108-a61b-8585c8e0dc08 to the ESX as VM named 'OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d' {{(pid=70020) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1039.679643] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1039.679643] env[70020]: value = "resgroup-9" [ 1039.679643] env[70020]: _type = "ResourcePool" [ 1039.679643] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1039.679643] env[70020]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-506b24ad-7de6-41b5-b7a9-927204b95959 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.704157] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lease: (returnval){ [ 1039.704157] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a6aa7-3768-4e46-c58f-9ba3ae1357be" [ 1039.704157] env[70020]: _type = "HttpNfcLease" [ 1039.704157] env[70020]: } obtained for vApp import into resource pool (val){ [ 1039.704157] env[70020]: value = "resgroup-9" [ 1039.704157] env[70020]: _type = "ResourcePool" [ 1039.704157] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1039.704157] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the lease: (returnval){ [ 1039.704157] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a6aa7-3768-4e46-c58f-9ba3ae1357be" [ 1039.704157] env[70020]: _type = "HttpNfcLease" [ 1039.704157] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1039.712291] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1039.712291] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a6aa7-3768-4e46-c58f-9ba3ae1357be" [ 1039.712291] env[70020]: _type = "HttpNfcLease" [ 1039.712291] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1039.827675] env[70020]: DEBUG oslo_vmware.api [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618785, 'name': ReconfigVM_Task, 'duration_secs': 0.161499} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.830821] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721768', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'name': 'volume-0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da07cb36-244f-4f48-a5b6-8d00324c1edf', 'attached_at': '', 'detached_at': '', 'volume_id': '0c646aab-21b0-4b8c-9b2a-36335a4b1275', 'serial': '0c646aab-21b0-4b8c-9b2a-36335a4b1275'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1039.871016] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d701a2-f09a-457a-8a25-84f3daa62b83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.878068] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a07727-a865-4167-9017-c8ee77d351f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.912007] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef3ea9c-a862-4f40-927b-74b649e2f669 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.919882] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8ec950-abd7-4f93-a81d-6514396c1318 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.934764] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1039.936994] env[70020]: DEBUG nova.compute.provider_tree [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1039.962453] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.962714] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.962871] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.963062] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.963209] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.963355] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.963559] env[70020]: DEBUG 
nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.963718] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.963883] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1039.964055] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.964249] env[70020]: DEBUG nova.virt.hardware [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.965322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e265a9-9f52-4279-b701-c40276d1aac7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.976019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dadcf1-9d68-4673-9182-40f502946d74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.020353] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618784, 'name': ReconfigVM_Task, 'duration_secs': 0.515986} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.020521] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/5b69d3b2-c236-45f9-b35b-a9992b9c1c79.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.021172] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce0cc6e7-17bc-43c3-8b3a-05d978430a01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.027835] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1040.027835] env[70020]: value = "task-3618787" [ 1040.027835] env[70020]: _type = "Task" [ 1040.027835] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.036566] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618787, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.057872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Releasing lock "refresh_cache-d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.059009] env[70020]: DEBUG nova.objects.instance [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lazy-loading 'flavor' on Instance uuid d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.149515] env[70020]: DEBUG nova.compute.manager [req-4661abc2-11f1-4b50-b25b-f6e1978f2863 req-243beeb0-5202-4f7b-8c15-07c13c7ef011 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Received event network-vif-plugged-12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1040.149603] env[70020]: DEBUG oslo_concurrency.lockutils [req-4661abc2-11f1-4b50-b25b-f6e1978f2863 req-243beeb0-5202-4f7b-8c15-07c13c7ef011 service nova] Acquiring lock "04de1a07-cf38-41e0-be96-237bbe1ead83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.149898] env[70020]: DEBUG oslo_concurrency.lockutils [req-4661abc2-11f1-4b50-b25b-f6e1978f2863 req-243beeb0-5202-4f7b-8c15-07c13c7ef011 service nova] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.150081] env[70020]: DEBUG oslo_concurrency.lockutils [req-4661abc2-11f1-4b50-b25b-f6e1978f2863 req-243beeb0-5202-4f7b-8c15-07c13c7ef011 service nova] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.150204] env[70020]: DEBUG nova.compute.manager [req-4661abc2-11f1-4b50-b25b-f6e1978f2863 req-243beeb0-5202-4f7b-8c15-07c13c7ef011 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] No waiting events found dispatching network-vif-plugged-12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1040.150368] env[70020]: WARNING nova.compute.manager [req-4661abc2-11f1-4b50-b25b-f6e1978f2863 req-243beeb0-5202-4f7b-8c15-07c13c7ef011 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Received unexpected event network-vif-plugged-12ace8f2-7b35-437a-aba2-e371201f3343 for instance with vm_state building and task_state spawning. [ 1040.212252] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1040.212252] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a6aa7-3768-4e46-c58f-9ba3ae1357be" [ 1040.212252] env[70020]: _type = "HttpNfcLease" [ 1040.212252] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1040.212597] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1040.212597] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a6aa7-3768-4e46-c58f-9ba3ae1357be" [ 1040.212597] env[70020]: _type = "HttpNfcLease" [ 1040.212597] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1040.213274] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa054f8b-7123-446f-8567-eb157055614e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.220950] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c33d03-683e-d8cc-a6dd-c699acc29593/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1040.221146] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c33d03-683e-d8cc-a6dd-c699acc29593/disk-0.vmdk. 
{{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1040.279623] env[70020]: DEBUG nova.network.neutron [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Successfully updated port: 12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1040.285697] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-180a3541-8756-4316-883d-a4c2b4465912 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.384917] env[70020]: DEBUG nova.objects.instance [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'flavor' on Instance uuid da07cb36-244f-4f48-a5b6-8d00324c1edf {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.470466] env[70020]: DEBUG nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 127 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1040.470782] env[70020]: DEBUG nova.compute.provider_tree [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 127 to 128 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1040.470994] env[70020]: DEBUG nova.compute.provider_tree [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1040.539875] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618787, 'name': Rename_Task, 'duration_secs': 0.182401} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.541884] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.542162] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40e88e6e-8f47-4cd4-8cd1-49f3e9d60a54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.549150] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1040.549150] env[70020]: value = "task-3618788" [ 1040.549150] env[70020]: _type = "Task" [ 1040.549150] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.559194] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.564393] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff16f263-371c-4e30-a4c6-ff5d71743512 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.589112] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.593894] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c22df60c-5579-43ef-a6fd-e1fceea62715 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.601075] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1040.601075] env[70020]: value = "task-3618789" [ 1040.601075] env[70020]: _type = "Task" [ 1040.601075] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.609352] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618789, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.783346] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-04de1a07-cf38-41e0-be96-237bbe1ead83" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.783346] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-04de1a07-cf38-41e0-be96-237bbe1ead83" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.783346] env[70020]: DEBUG nova.network.neutron [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.978149] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.066s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.982757] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.306s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.983043] env[70020]: DEBUG nova.objects.instance [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'resources' on Instance uuid d45966fe-98ff-4466-8e7e-90550034742f {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.004021] env[70020]: INFO nova.scheduler.client.report [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted allocations for instance 40fa0339-c221-4841-9444-dc957a95cf3b [ 1041.060661] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618788, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.112340] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618789, 'name': PowerOffVM_Task, 'duration_secs': 0.272653} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.114621] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1041.120272] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Reconfiguring VM instance instance-00000059 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1041.122743] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e35c11e-592c-46da-963a-1b00001b383a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.146056] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1041.146056] env[70020]: value = "task-3618790" [ 1041.146056] env[70020]: _type = "Task" [ 1041.146056] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.157816] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618790, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.317229] env[70020]: DEBUG nova.network.neutron [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.393305] env[70020]: DEBUG oslo_concurrency.lockutils [None req-65cf516f-0522-4d05-9099-28e771722fd3 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.271s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.461526] env[70020]: DEBUG nova.network.neutron [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Updating instance_info_cache with network_info: [{"id": "12ace8f2-7b35-437a-aba2-e371201f3343", "address": "fa:16:3e:d7:48:3b", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12ace8f2-7b", "ovs_interfaceid": "12ace8f2-7b35-437a-aba2-e371201f3343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.471046] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1041.471046] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c33d03-683e-d8cc-a6dd-c699acc29593/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1041.472032] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb561c4-edba-4007-81df-5aac9c658d6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.479984] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c33d03-683e-d8cc-a6dd-c699acc29593/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1041.480184] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c33d03-683e-d8cc-a6dd-c699acc29593/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1041.480447] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-201d7159-83f2-4528-8142-187bdde7fed3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.514923] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5ee4af63-d055-43c9-8388-9b514e2ab591 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "40fa0339-c221-4841-9444-dc957a95cf3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.951s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.560138] env[70020]: DEBUG oslo_vmware.api [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618788, 'name': PowerOnVM_Task, 'duration_secs': 0.577623} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.562395] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.562649] env[70020]: INFO nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Took 8.51 seconds to spawn the instance on the hypervisor. [ 1041.562842] env[70020]: DEBUG nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.563980] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b56bdd9-d960-4cad-be93-3fa3611c198a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.657173] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618790, 'name': ReconfigVM_Task, 'duration_secs': 0.25641} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.660296] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Reconfigured VM instance instance-00000059 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1041.660507] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.660919] env[70020]: DEBUG oslo_vmware.rw_handles [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c33d03-683e-d8cc-a6dd-c699acc29593/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1041.661133] env[70020]: INFO nova.virt.vmwareapi.images [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Downloaded image file data ce5b528a-e2a0-4108-a61b-8585c8e0dc08 [ 1041.662184] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d92096a9-ecf6-473a-bdd3-923285a2f7f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.663553] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6929adf4-84de-4ced-a42a-6b14fc09e1a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.683056] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c039e34c-6b81-4725-8076-99944bb465ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.684440] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1041.684440] env[70020]: value = "task-3618791" [ 1041.684440] env[70020]: _type = "Task" [ 1041.684440] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.692281] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618791, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.709141] env[70020]: INFO nova.virt.vmwareapi.images [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] The imported VM was unregistered [ 1041.711823] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1041.712137] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Creating directory with path [datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.712465] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-771759d3-d521-4c2c-89f7-5e2bd5a00f30 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.742206] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Created directory with path [datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.743372] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d/OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d.vmdk to [datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk. {{(pid=70020) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1041.743372] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-dd1a105b-bcd5-44fd-a872-e4a45ad8b747 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.751442] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1041.751442] env[70020]: value = "task-3618793" [ 1041.751442] env[70020]: _type = "Task" [ 1041.751442] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.758944] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.760360] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2188ce40-af8a-44dd-81e4-71df7bd9d653 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.767161] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1938536f-6fe4-4ee7-bdf2-1676fb050282 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.799432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbe58db-f8ec-4925-bd4f-673331b7c2b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.807454] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c3bfcd-7493-45cb-825e-6d0aa3a998af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.822985] env[70020]: DEBUG nova.compute.provider_tree [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.967781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-04de1a07-cf38-41e0-be96-237bbe1ead83" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.968639] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance network_info: |[{"id": "12ace8f2-7b35-437a-aba2-e371201f3343", "address": "fa:16:3e:d7:48:3b", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12ace8f2-7b", "ovs_interfaceid": "12ace8f2-7b35-437a-aba2-e371201f3343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1041.968639] env[70020]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:48:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12ace8f2-7b35-437a-aba2-e371201f3343', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.978013] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating folder: Project (f3f6d704dd464768953c41d34d34d944). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1041.979028] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e530e82-f8d2-4d33-b633-cdcc175473c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.990211] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created folder: Project (f3f6d704dd464768953c41d34d34d944) in parent group-v721521. [ 1041.990211] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating folder: Instances. Parent ref: group-v721776. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1041.990301] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83eba7ad-d5fe-4e37-9f89-b188bedc6f1b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.002067] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created folder: Instances in parent group-v721776. [ 1042.002368] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1042.002832] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1042.002919] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18434ff2-d79e-4d4b-b33e-9a05e3b580ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.026172] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.026172] env[70020]: value = "task-3618796" [ 1042.026172] env[70020]: _type = "Task" [ 1042.026172] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.037504] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618796, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.088521] env[70020]: INFO nova.compute.manager [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Took 42.63 seconds to build instance. [ 1042.198972] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618791, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.262393] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.275167] env[70020]: DEBUG nova.compute.manager [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Received event network-changed-12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.275335] env[70020]: DEBUG nova.compute.manager [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Refreshing instance network info cache due to event network-changed-12ace8f2-7b35-437a-aba2-e371201f3343. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1042.275563] env[70020]: DEBUG oslo_concurrency.lockutils [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] Acquiring lock "refresh_cache-04de1a07-cf38-41e0-be96-237bbe1ead83" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.275695] env[70020]: DEBUG oslo_concurrency.lockutils [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] Acquired lock "refresh_cache-04de1a07-cf38-41e0-be96-237bbe1ead83" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.275850] env[70020]: DEBUG nova.network.neutron [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Refreshing network info cache for port 12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.326231] env[70020]: DEBUG nova.scheduler.client.report [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.537898] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618796, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.591811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d938b12-380a-4861-a71b-7fd579061a68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.148s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.695729] env[70020]: DEBUG oslo_vmware.api [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618791, 'name': PowerOnVM_Task, 'duration_secs': 0.58403} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.696134] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.696377] env[70020]: DEBUG nova.compute.manager [None req-278e55d3-a914-42e8-b2b7-0ea28b4b73f1 tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1042.697201] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d06914-7814-475a-bd2d-fa137cb5a909 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.746768] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.747057] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.747273] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "da07cb36-244f-4f48-a5b6-8d00324c1edf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.747455] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.747624] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.750160] env[70020]: INFO nova.compute.manager [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] 
[instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Terminating instance [ 1042.762933] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.832662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.834989] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.943s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.860845] env[70020]: INFO nova.scheduler.client.report [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted allocations for instance d45966fe-98ff-4466-8e7e-90550034742f [ 1042.995105] env[70020]: INFO nova.compute.manager [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Rescuing [ 1042.995484] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.995709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.995946] env[70020]: DEBUG nova.network.neutron [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.026120] env[70020]: DEBUG nova.network.neutron [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Updated VIF entry in instance network info cache for port 12ace8f2-7b35-437a-aba2-e371201f3343. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.026501] env[70020]: DEBUG nova.network.neutron [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Updating instance_info_cache with network_info: [{"id": "12ace8f2-7b35-437a-aba2-e371201f3343", "address": "fa:16:3e:d7:48:3b", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12ace8f2-7b", "ovs_interfaceid": "12ace8f2-7b35-437a-aba2-e371201f3343", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.040837] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618796, 'name': CreateVM_Task, 'duration_secs': 0.973103} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.041023] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1043.041717] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.041885] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.042221] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1043.042783] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcdc6a58-25e4-4486-9c30-0a9871acc7e8 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.048452] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1043.048452] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525e9873-e70b-f16b-d9c5-b22e448dbbec" [ 1043.048452] env[70020]: _type = "Task" [ 1043.048452] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.057106] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525e9873-e70b-f16b-d9c5-b22e448dbbec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.254916] env[70020]: DEBUG nova.compute.manager [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1043.254916] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.259436] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e73372e-f00f-4256-b9b3-d5fa7b0c66b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.269512] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.271858] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.272241] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82e8c3c5-71d5-458d-aabc-705148023ef7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.281283] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1043.281283] env[70020]: value = "task-3618797" [ 1043.281283] env[70020]: _type = "Task" [ 1043.281283] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.291776] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618797, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.370977] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c88234d3-f5e6-437a-82bc-2a153915e881 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "d45966fe-98ff-4466-8e7e-90550034742f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.291s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.529886] env[70020]: DEBUG oslo_concurrency.lockutils [req-cd906717-dd8d-486e-af91-c175aebf8833 req-e5f78bff-b91a-44ca-8da6-d9fc5ef4e3b1 service nova] Releasing lock "refresh_cache-04de1a07-cf38-41e0-be96-237bbe1ead83" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.561429] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525e9873-e70b-f16b-d9c5-b22e448dbbec, 'name': SearchDatastore_Task, 'duration_secs': 0.022019} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.561791] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.561931] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.562194] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.562339] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.562619] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 
tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.563163] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-337512ef-5e8d-4bae-8d5a-2bd892e0fb26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.576366] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.576550] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1043.577441] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a536bba-18c2-48f2-bd80-1b8eabb119b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.586169] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1043.586169] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526c74d8-5e8f-5653-798c-4245e925dc0e" [ 1043.586169] env[70020]: _type = "Task" [ 1043.586169] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.597174] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526c74d8-5e8f-5653-798c-4245e925dc0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.717369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.717687] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.718036] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.718342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.718624] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.723109] env[70020]: INFO nova.compute.manager [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Terminating instance [ 1043.775498] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.794225] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618797, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.882212] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8adadb2e-2a20-45b1-bed8-34e09df25f39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.882370] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.882494] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 5c216231-afc5-41df-a243-bb2a17c20bfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.882682] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.882825] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 45926a02-d0fe-4274-ba47-b97b3e12e4cd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1043.882971] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 3a4f2342-58e7-436b-a779-0fa093b52409 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1043.883129] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance da07cb36-244f-4f48-a5b6-8d00324c1edf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.883247] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance a39731d2-0b9b-41fa-b9ac-f80193a26d20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.883370] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 42d20396-883d-4141-a226-61f476057cbe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 2198e7f8-5458-4b97-abb3-0a3c932cebc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 3dedfa48-0839-462e-8c32-ba5252f07ac0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.886847] env[70020]: WARNING nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 056141e3-5628-4451-bd25-f4fa15edd11e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 0453722d-258f-49e3-b61e-f1081eb465c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.886847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 04de1a07-cf38-41e0-be96-237bbe1ead83 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1044.056128] env[70020]: DEBUG nova.network.neutron [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Updating instance_info_cache with network_info: [{"id": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "address": "fa:16:3e:ff:ad:83", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36d80bdd-ca", "ovs_interfaceid": "36d80bdd-ca39-476a-91b5-601ea7cb1316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.098837] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526c74d8-5e8f-5653-798c-4245e925dc0e, 'name': SearchDatastore_Task, 'duration_secs': 0.01942} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.100207] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9c13c90-6f54-4434-bf6d-e55e17067f04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.107206] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1044.107206] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5210ef1c-47a9-4c9d-68b5-c889cf6157ae" [ 1044.107206] env[70020]: _type = "Task" [ 1044.107206] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.116471] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5210ef1c-47a9-4c9d-68b5-c889cf6157ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.231028] env[70020]: DEBUG nova.compute.manager [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1044.231028] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.231537] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53322e3-9a2a-43ab-9248-4297098fef1b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.240845] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.241165] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c756b17-aa8c-488b-ab9a-deb147258d49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.248078] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1044.248078] env[70020]: value = "task-3618798" [ 1044.248078] env[70020]: _type = "Task" [ 1044.248078] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.257973] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618798, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.267684] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.291569] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618797, 'name': PowerOffVM_Task, 'duration_secs': 0.871522} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.293164] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.293400] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.296724] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abb1674d-996f-4970-86a5-352640df5d68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.299041] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.299311] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.367586] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.367852] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.368027] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Deleting the datastore file [datastore1] da07cb36-244f-4f48-a5b6-8d00324c1edf {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.368342] env[70020]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc239316-c7a6-4e21-a2ed-acbbc19a1270 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.375911] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for the task: (returnval){ [ 1044.375911] env[70020]: value = "task-3618800" [ 1044.375911] env[70020]: _type = "Task" [ 1044.375911] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.385279] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618800, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.387159] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1ddd5a29-075b-482a-a6e9-4c7345673a00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1044.558498] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-5b69d3b2-c236-45f9-b35b-a9992b9c1c79" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.617971] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5210ef1c-47a9-4c9d-68b5-c889cf6157ae, 'name': SearchDatastore_Task, 'duration_secs': 0.02026} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.618275] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.618547] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1044.618816] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5ff9073-2fc5-4f16-ad41-322fd0b9c56f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.626440] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1044.626440] env[70020]: value = "task-3618801" [ 1044.626440] env[70020]: _type = "Task" [ 1044.626440] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.635959] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618801, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.757254] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618798, 'name': PowerOffVM_Task, 'duration_secs': 0.207273} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.757537] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.757702] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.757998] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70772754-e461-480d-b4c3-414a8b80ee08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.767986] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618793, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.924234} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.768257] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d/OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d.vmdk to [datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk. [ 1044.768441] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Cleaning up location [datastore2] OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1044.768606] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_a290d12f-3ac6-4147-a129-1a3ae39c1e0d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.768841] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13bb2b28-99c5-4854-bdfa-3bdeb43f7ac5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.774427] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1044.774427] env[70020]: value = "task-3618803" [ 1044.774427] env[70020]: _type = "Task" [ 1044.774427] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.782682] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618803, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.802570] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1044.809794] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.810353] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.820189] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.820499] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.820783] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Deleting the datastore file [datastore1] d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.821132] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9051f43e-ee29-4128-a2fc-cf03ef9dae66 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.828717] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1044.828717] env[70020]: value = "task-3618804" [ 1044.828717] env[70020]: _type = "Task" [ 1044.828717] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.838246] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618804, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.886652] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618800, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.890615] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 97fe6c57-03de-4cf8-a990-ff4f88db6cd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1045.138178] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618801, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.285611] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.054624} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.285939] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.286170] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.286456] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk to [datastore2] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.286744] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9ef8334-4a32-4b19-a588-e14c18ef02d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.295925] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1045.295925] env[70020]: value = "task-3618805" [ 1045.295925] env[70020]: _type = "Task" [ 1045.295925] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.305933] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.314206] env[70020]: INFO nova.compute.manager [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Detaching volume d582b3d1-9fab-425f-83f6-c90095c5e316 [ 1045.327776] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.341264] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618804, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.360412] env[70020]: INFO nova.virt.block_device [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Attempting to driver detach volume d582b3d1-9fab-425f-83f6-c90095c5e316 from mountpoint /dev/sdb [ 1045.360412] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1045.360412] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721712', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'name': 'volume-d582b3d1-9fab-425f-83f6-c90095c5e316', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5c216231-afc5-41df-a243-bb2a17c20bfe', 'attached_at': '', 'detached_at': '', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'serial': 'd582b3d1-9fab-425f-83f6-c90095c5e316'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1045.360412] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e55afb-ea78-4135-9503-6ef19eb7c1ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.384743] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be29a719-fbcf-402c-ac48-c10874a24f2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.396260] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf8643e-4b7b-4939-9eac-1d7dae327081 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.397187] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c972e083-8c91-4875-a8c6-8257b06c93a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1045.398485] env[70020]: DEBUG oslo_vmware.api [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Task: {'id': task-3618800, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.542505} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.398949] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.399161] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.399338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.399505] env[70020]: INFO nova.compute.manager [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1045.399737] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.400269] env[70020]: DEBUG nova.compute.manager [-] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.400380] env[70020]: DEBUG nova.network.neutron [-] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.419353] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd954efd-7878-4309-9382-d2db550cb563 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.435670] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] The volume has not been displaced from its original location: [datastore2] volume-d582b3d1-9fab-425f-83f6-c90095c5e316/volume-d582b3d1-9fab-425f-83f6-c90095c5e316.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1045.440995] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Reconfiguring VM instance instance-00000037 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1045.441609] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3f32645-1949-427a-8f94-39ca09973048 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.459858] env[70020]: DEBUG oslo_vmware.api [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1045.459858] env[70020]: value = "task-3618806" [ 1045.459858] env[70020]: _type = "Task" [ 1045.459858] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.467919] env[70020]: DEBUG oslo_vmware.api [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618806, 'name': ReconfigVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.636700] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618801, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.856687} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.636939] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1045.637169] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1045.637426] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3171f47-feac-42ae-a0d6-49ae279279a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.643818] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1045.643818] env[70020]: value = "task-3618807" [ 1045.643818] env[70020]: _type = "Task" [ 1045.643818] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.651142] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618807, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.808223] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.839577] env[70020]: DEBUG oslo_vmware.api [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.683824} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.839832] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.840217] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.840254] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.840869] env[70020]: INFO nova.compute.manager [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1045.840869] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.841098] env[70020]: DEBUG nova.compute.manager [-] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.841098] env[70020]: DEBUG nova.network.neutron [-] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.893304] env[70020]: DEBUG nova.compute.manager [req-b6f7ed24-5929-45c0-ba37-7da15544ffd6 req-a8fcf696-daae-4333-afc3-0bce6252cadb service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Received event network-vif-deleted-9c6dd76e-1819-4f40-b5b1-e548b0c947ec {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.893304] env[70020]: INFO nova.compute.manager [req-b6f7ed24-5929-45c0-ba37-7da15544ffd6 req-a8fcf696-daae-4333-afc3-0bce6252cadb service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Neutron deleted interface 9c6dd76e-1819-4f40-b5b1-e548b0c947ec; detaching it from the instance and deleting it from the info cache [ 1045.893304] env[70020]: DEBUG nova.network.neutron [req-b6f7ed24-5929-45c0-ba37-7da15544ffd6 req-a8fcf696-daae-4333-afc3-0bce6252cadb service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.899509] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9d1568bf-4027-4d4c-b089-276006eee715 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1045.970153] env[70020]: DEBUG oslo_vmware.api [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618806, 'name': ReconfigVM_Task, 'duration_secs': 0.265984} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.970424] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Reconfigured VM instance instance-00000037 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1045.974999] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbde4ee8-8822-4b10-a002-d806baf0e45e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.989720] env[70020]: DEBUG oslo_vmware.api [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1045.989720] env[70020]: value = "task-3618808" [ 1045.989720] env[70020]: _type = "Task" [ 1045.989720] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.998253] env[70020]: DEBUG oslo_vmware.api [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618808, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.095088] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1046.095437] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-047f2649-2161-4eab-ac84-6dfb91272160 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.103274] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1046.103274] env[70020]: value = "task-3618809" [ 1046.103274] env[70020]: _type = "Task" [ 1046.103274] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.112692] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.155451] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064835} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.156866] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.157823] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0399660c-fa1f-4273-af75-5265b17ebdec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.184913] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.187224] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06e280b7-1f9e-409b-a5c1-0c068a69ca54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.209025] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1046.209025] env[70020]: value = "task-3618810" [ 1046.209025] env[70020]: _type = "Task" [ 1046.209025] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.218793] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618810, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.306527] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.377894] env[70020]: DEBUG nova.network.neutron [-] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.395382] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b57ce1dd-3a96-46a0-ac0c-006414524cac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.403250] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8dbb1de0-38de-493f-9512-b8754bab7bcb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.403527] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.403672] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1046.411513] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196eac62-0983-459f-bed4-de7db9c89fb3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.452109] env[70020]: DEBUG nova.compute.manager [req-b6f7ed24-5929-45c0-ba37-7da15544ffd6 req-a8fcf696-daae-4333-afc3-0bce6252cadb service nova] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Detach interface failed, port_id=9c6dd76e-1819-4f40-b5b1-e548b0c947ec, reason: Instance da07cb36-244f-4f48-a5b6-8d00324c1edf could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1046.503703] env[70020]: DEBUG oslo_vmware.api [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618808, 'name': ReconfigVM_Task, 'duration_secs': 0.154598} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.504348] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721712', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'name': 'volume-d582b3d1-9fab-425f-83f6-c90095c5e316', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5c216231-afc5-41df-a243-bb2a17c20bfe', 'attached_at': '', 'detached_at': '', 'volume_id': 'd582b3d1-9fab-425f-83f6-c90095c5e316', 'serial': 'd582b3d1-9fab-425f-83f6-c90095c5e316'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1046.616338] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.652903] env[70020]: DEBUG nova.network.neutron [-] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.723762] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618810, 'name': ReconfigVM_Task, 'duration_secs': 0.505405} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.723974] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1046.724729] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a157ca81-de89-480c-87c0-7e36b1cd04de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.732394] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1046.732394] env[70020]: value = "task-3618811" [ 1046.732394] env[70020]: _type = "Task" [ 1046.732394] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.741752] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b16972c-5260-406f-b827-f1d929029513 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.749480] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618811, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.756360] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0f5f96-503d-49ae-94a3-1a01a48a1832 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.789920] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebea334-3b2c-476f-80fd-08f25b59b372 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.801729] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1811ab2f-8fed-4985-9522-744364959b01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.813479] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.822094] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.880555] env[70020]: INFO nova.compute.manager [-] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Took 1.48 seconds to deallocate network for instance. [ 1047.056221] env[70020]: DEBUG nova.objects.instance [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'flavor' on Instance uuid 5c216231-afc5-41df-a243-bb2a17c20bfe {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.114541] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618809, 'name': PowerOffVM_Task, 'duration_secs': 0.974787} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.114928] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.115801] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b6b01-1f24-4196-a997-fd505048e718 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.136425] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a849dd-7c6a-44ae-a120-d5e8c7f3e440 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.155978] env[70020]: INFO nova.compute.manager [-] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Took 1.31 seconds to deallocate network for instance. [ 1047.173390] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.173719] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e81af5f3-1bc4-4b4a-8ad6-f903fc931502 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.181243] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1047.181243] env[70020]: value = "task-3618812" [ 1047.181243] env[70020]: _type = "Task" [ 1047.181243] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.191672] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1047.191893] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.192163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.192314] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.192494] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.193047] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36f5d7d9-af35-4590-86fa-9f38ac320e75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.210386] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.210598] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.211419] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48ff2fc7-fca2-40cc-9480-cb4c7190e659 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.217222] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1047.217222] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f0c772-c231-0810-d1f3-b31780fb0386" [ 1047.217222] env[70020]: _type = "Task" [ 1047.217222] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.226290] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f0c772-c231-0810-d1f3-b31780fb0386, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.243154] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618811, 'name': Rename_Task, 'duration_secs': 0.230483} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.243787] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.246031] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcf2676a-93bd-443b-9baf-9fa52c2e85f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.251354] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1047.251354] env[70020]: value = "task-3618813" [ 1047.251354] env[70020]: _type = "Task" [ 1047.251354] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.260179] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618813, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.307429] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.326031] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.387755] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.662455] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.730832] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f0c772-c231-0810-d1f3-b31780fb0386, 'name': SearchDatastore_Task, 'duration_secs': 0.090331} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.731689] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa1cdea6-7b39-4d02-9569-3bf297bd1f8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.738603] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1047.738603] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528a2f99-ba14-cd3e-aba8-4ce599badc1d" [ 1047.738603] env[70020]: _type = "Task" [ 1047.738603] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.746908] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528a2f99-ba14-cd3e-aba8-4ce599badc1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.760762] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.807320] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.831359] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1047.831634] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.997s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.831967] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.924s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.833890] env[70020]: INFO nova.compute.claims [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1047.952366] env[70020]: DEBUG nova.compute.manager [req-c8236d44-32e0-490f-9451-bb14e5a3efb4 req-ea47945c-0300-481b-9a8e-0283489f83e0 service nova] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Received event network-vif-deleted-02386321-e9cb-45ce-b235-d3c121d3cff1 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.064873] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d255f120-e82c-4cec-8c64-9fd2a28de956 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.254s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.248282] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528a2f99-ba14-cd3e-aba8-4ce599badc1d, 'name': SearchDatastore_Task, 'duration_secs': 0.088295} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.248596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.248792] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. {{(pid=70020) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1048.249063] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2796819a-cbbe-42fe-9e83-bbd9ca381f38 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.256426] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1048.256426] env[70020]: value = "task-3618814" [ 1048.256426] env[70020]: _type = "Task" [ 1048.256426] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.262315] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.266890] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.307604] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618805, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.942391} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.307877] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce5b528a-e2a0-4108-a61b-8585c8e0dc08/ce5b528a-e2a0-4108-a61b-8585c8e0dc08.vmdk to [datastore2] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.308673] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd86bc8-8596-4218-b402-7305e028dd92 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.330843] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.331205] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07a38749-0018-4b63-9a68-baaec9b31db3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.358402] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1048.358402] env[70020]: value = "task-3618815" [ 1048.358402] env[70020]: _type = "Task" [ 1048.358402] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.366529] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618815, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.763784] env[70020]: DEBUG oslo_vmware.api [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618813, 'name': PowerOnVM_Task, 'duration_secs': 1.170321} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.764405] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.764655] env[70020]: INFO nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Took 8.83 seconds to spawn the instance on the hypervisor. [ 1048.764846] env[70020]: DEBUG nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.765583] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f58703-222d-4f5e-9ae0-0a83cefa8d31 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.770713] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618814, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463091} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.771267] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. 
[ 1048.771962] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b214b07c-48d0-4afc-b2c8-3bd2d352dfa5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.801586] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.802349] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2d07e7a-0c95-4b1c-905c-4b95945a2290 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.822921] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1048.822921] env[70020]: value = "task-3618816" [ 1048.822921] env[70020]: _type = "Task" [ 1048.822921] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.832085] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618816, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.869397] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618815, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.123196] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4931f539-2768-41a6-a8fd-91fbe1d7c8ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.130650] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928fc287-0e4b-44ba-b798-bb3f079f8088 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.161380] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f63570-65b6-40f3-bdcb-2f294f928e14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.168496] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d839592-c179-4879-bd83-eeb1137352ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.172449] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.172761] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.172968] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "5c216231-afc5-41df-a243-bb2a17c20bfe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.173169] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.173331] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.175380] env[70020]: INFO 
nova.compute.manager [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Terminating instance [ 1049.184302] env[70020]: DEBUG nova.compute.provider_tree [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.286774] env[70020]: INFO nova.compute.manager [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Took 45.55 seconds to build instance. [ 1049.333413] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618816, 'name': ReconfigVM_Task, 'duration_secs': 0.296961} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.333711] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.334582] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340ee49c-05a3-48b2-a769-1faf6bc29b28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.363083] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67915f6c-c08d-463e-a4f6-190914584fd2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.382262] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618815, 'name': ReconfigVM_Task, 'duration_secs': 0.531283} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.383584] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 2198e7f8-5458-4b97-abb3-0a3c932cebc2/2198e7f8-5458-4b97-abb3-0a3c932cebc2.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.384304] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1049.384304] env[70020]: value = "task-3618817" [ 1049.384304] env[70020]: _type = "Task" [ 1049.384304] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.384492] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5a92ef2-00d9-4eef-aacb-97b3e9112abd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.395198] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618817, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.396560] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1049.396560] env[70020]: value = "task-3618818" [ 1049.396560] env[70020]: _type = "Task" [ 1049.396560] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.404991] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618818, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.687623] env[70020]: DEBUG nova.scheduler.client.report [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.691110] env[70020]: DEBUG nova.compute.manager [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.691322] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.692334] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770e28bf-011b-49de-bdba-f2e638704b7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.700564] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.700787] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-798bfa8b-7230-4644-893c-48304e5af6d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.707153] env[70020]: DEBUG oslo_vmware.api [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1049.707153] env[70020]: value = "task-3618819" [ 1049.707153] env[70020]: _type = "Task" [ 1049.707153] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.714830] env[70020]: DEBUG oslo_vmware.api [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618819, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.788714] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f1bc1af3-9678-4b46-b56a-3cacdfecca51 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.074s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.895577] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618817, 'name': ReconfigVM_Task, 'duration_secs': 0.178941} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.895824] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.896107] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78dbe6ce-dec4-4b3f-b9bb-b2a86935a256 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.907596] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618818, 'name': Rename_Task, 'duration_secs': 0.151285} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.908820] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.909161] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1049.909161] env[70020]: value = "task-3618820" [ 1049.909161] env[70020]: _type = "Task" [ 1049.909161] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.909349] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49cc3868-1299-441c-afc7-3091b5031bc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.918777] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618820, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.919959] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1049.919959] env[70020]: value = "task-3618821" [ 1049.919959] env[70020]: _type = "Task" [ 1049.919959] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.927544] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618821, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.193735] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.194333] env[70020]: DEBUG nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1050.197281] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.743s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.198655] env[70020]: INFO nova.compute.claims [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1050.217359] env[70020]: DEBUG oslo_vmware.api [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618819, 'name': PowerOffVM_Task, 'duration_secs': 0.202093} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.217359] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.217519] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.218169] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f277449a-7145-4210-9794-2b6cdbcb6f5e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.281512] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.281738] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.281919] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleting the datastore file [datastore1] 5c216231-afc5-41df-a243-bb2a17c20bfe {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.282195] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a50cf61-9b4c-4a13-b532-7adba38590ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.289608] env[70020]: DEBUG oslo_vmware.api [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1050.289608] env[70020]: value = "task-3618823" [ 1050.289608] env[70020]: _type = "Task" [ 1050.289608] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.297390] env[70020]: DEBUG oslo_vmware.api [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618823, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.421596] env[70020]: DEBUG oslo_vmware.api [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618820, 'name': PowerOnVM_Task, 'duration_secs': 0.483432} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.424767] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.427522] env[70020]: DEBUG nova.compute.manager [None req-4d9adf97-6976-4469-93a4-43d34b1ed2c0 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.428576] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c46a70-0ce0-4bd4-8057-1f21df68c237 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.436552] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618821, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.708290] env[70020]: DEBUG nova.compute.utils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1050.709831] env[70020]: DEBUG nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1050.802664] env[70020]: DEBUG oslo_vmware.api [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.50325} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.803256] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.803345] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.803608] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.803870] env[70020]: INFO nova.compute.manager [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1050.804237] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.804498] env[70020]: DEBUG nova.compute.manager [-] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.804663] env[70020]: DEBUG nova.network.neutron [-] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.850541] env[70020]: INFO nova.compute.manager [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Rebuilding instance [ 1050.904298] env[70020]: DEBUG nova.compute.manager [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.905211] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b6a077-500b-4dff-b7ca-e34b21862781 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.930737] env[70020]: DEBUG oslo_vmware.api [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618821, 'name': PowerOnVM_Task, 'duration_secs': 0.979414} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.930737] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1051.042241] env[70020]: DEBUG nova.compute.manager [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.043063] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd63e6a1-0834-4edf-bcd3-1e73fa4387d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.215359] env[70020]: DEBUG nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1051.397483] env[70020]: DEBUG nova.compute.manager [req-80049bff-4998-48fb-9827-e4ecf63430da req-0a15fb3f-1340-4176-a9e5-6ce317e3a3ef service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Received event network-vif-deleted-9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.397483] env[70020]: INFO nova.compute.manager [req-80049bff-4998-48fb-9827-e4ecf63430da req-0a15fb3f-1340-4176-a9e5-6ce317e3a3ef service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Neutron deleted interface 9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e; detaching it from the instance and deleting it from the info cache [ 1051.397747] env[70020]: DEBUG nova.network.neutron [req-80049bff-4998-48fb-9827-e4ecf63430da req-0a15fb3f-1340-4176-a9e5-6ce317e3a3ef service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.551076] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f860141-dc1b-4ef0-b4c1-4fd1f1a573fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.563754] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7dff48fc-ed7e-432a-a992-52d280f60f9d tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 50.706s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.566817] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8471da60-5fbd-4a54-875b-0767de8828a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.601257] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7d8701-0fde-4457-8a74-4c2a7294ab81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.609520] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db0c74b-8dc6-4bc2-8f1a-6dc0c8540180 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.625285] env[70020]: DEBUG nova.compute.provider_tree [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1051.811439] env[70020]: DEBUG nova.network.neutron [-] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Updating instance_info_cache with network_info: 
[] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.901155] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5950fb9-33f5-4eae-9044-e68839ed14b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.910745] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a81408-fe44-4454-b620-aca8fae27760 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.922201] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.922635] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2b5abc4-e8ac-4f2d-b1f0-dec3a4cb2bd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.929792] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1051.929792] env[70020]: value = "task-3618824" [ 1051.929792] env[70020]: _type = "Task" [ 1051.929792] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.937729] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.948060] env[70020]: DEBUG nova.compute.manager [req-80049bff-4998-48fb-9827-e4ecf63430da req-0a15fb3f-1340-4176-a9e5-6ce317e3a3ef service nova] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Detach interface failed, port_id=9f5cbd43-e9e7-4f35-b01e-e2e0dcbc1f5e, reason: Instance 5c216231-afc5-41df-a243-bb2a17c20bfe could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1052.150448] env[70020]: ERROR nova.scheduler.client.report [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [req-c71ee2c7-86de-4541-92de-5e5c97798914] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c71ee2c7-86de-4541-92de-5e5c97798914"}]} [ 1052.174044] env[70020]: DEBUG nova.scheduler.client.report [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1052.190746] env[70020]: DEBUG nova.scheduler.client.report [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1052.191072] env[70020]: DEBUG nova.compute.provider_tree [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1052.205097] env[70020]: DEBUG nova.scheduler.client.report [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1052.225703] env[70020]: DEBUG nova.scheduler.client.report [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1052.230451] env[70020]: DEBUG nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1052.260238] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1052.260238] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.260238] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1052.260238] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.260238] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1052.260559] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1052.260884] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1052.261172] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1052.261503] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 
tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1052.261783] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1052.262153] env[70020]: DEBUG nova.virt.hardware [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1052.263203] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fe9308-7a52-48d7-b1f1-2136aa030771 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.278019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9a637a-5c69-4dd9-87eb-9b103affcd23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.293240] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.298818] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Creating folder: Project (94d5b993851b4116b40d0c234d964be4). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1052.302649] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-892fc4a7-4b10-4364-9575-bbd7e4e10246 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.312811] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Created folder: Project (94d5b993851b4116b40d0c234d964be4) in parent group-v721521. [ 1052.313020] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Creating folder: Instances. Parent ref: group-v721779. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1052.313477] env[70020]: INFO nova.compute.manager [-] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Took 1.51 seconds to deallocate network for instance. 
[ 1052.313690] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0a3e197-b40c-4db1-84c0-44dc5b353d9e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.325096] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Created folder: Instances in parent group-v721779. [ 1052.325096] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.327482] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1052.327910] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55661b49-76f6-4eb7-9f65-2450397dd353 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.348913] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.348913] env[70020]: value = "task-3618827" [ 1052.348913] env[70020]: _type = "Task" [ 1052.348913] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.359516] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618827, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.445538] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618824, 'name': PowerOffVM_Task, 'duration_secs': 0.300957} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.445886] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.446409] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.447132] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b806c2-9328-463a-89db-f09275f66053 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.456920] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.457236] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3828b6d4-5c9d-4482-9978-a1875a45f32f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.516777] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.517244] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.517563] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.522056] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d4d6238-5d5e-47c8-beff-5d22e8d384af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.529323] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1052.529323] env[70020]: value = "task-3618829" [ 1052.529323] env[70020]: _type = "Task" [ 1052.529323] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.539969] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.564127] env[70020]: INFO nova.compute.manager [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Rescuing [ 1052.564548] env[70020]: DEBUG oslo_concurrency.lockutils [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.564825] env[70020]: DEBUG oslo_concurrency.lockutils [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.565078] env[70020]: DEBUG nova.network.neutron [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.581737] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a9ba8a-ec4a-4650-958a-3ba37f27238e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.589755] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da87bd06-c8b9-4717-a5b8-d1b787599dbb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.622365] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20182d38-e27a-4eac-9685-9523a5f38475 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.629896] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59626cfc-0f16-4ae8-9a30-2292d3b2270c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.644920] env[70020]: DEBUG nova.compute.provider_tree [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1052.826857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.860845] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618827, 'name': CreateVM_Task, 'duration_secs': 0.347278} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.861044] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.861655] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.861655] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.861938] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1052.862219] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7f64945-03e5-4324-b3f9-12a944a69d5c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.866647] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1052.866647] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524e08cd-ca83-003b-0e7d-f2fff4c61771" [ 1052.866647] env[70020]: _type = "Task" [ 1052.866647] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.874115] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524e08cd-ca83-003b-0e7d-f2fff4c61771, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.984142] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1929ccf8-48fb-4dc1-8331-db0e7903eda0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.990774] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Suspending the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1052.991101] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e9c7f19e-b507-42b2-bb8a-11d26393a2fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.997267] env[70020]: DEBUG oslo_vmware.api [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1052.997267] env[70020]: value = "task-3618830" [ 1052.997267] env[70020]: _type = "Task" [ 1052.997267] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.005192] env[70020]: DEBUG oslo_vmware.api [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618830, 'name': SuspendVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.038800] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202075} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.039084] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.039249] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.039422] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.181694] env[70020]: DEBUG nova.scheduler.client.report [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1053.181954] env[70020]: DEBUG nova.compute.provider_tree [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 129 to 130 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1053.182183] env[70020]: DEBUG nova.compute.provider_tree [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1053.279218] env[70020]: DEBUG nova.network.neutron [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updating instance_info_cache with network_info: [{"id": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "address": "fa:16:3e:17:d6:22", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", 
"label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9d26c4-ee", "ovs_interfaceid": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.378400] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524e08cd-ca83-003b-0e7d-f2fff4c61771, 'name': SearchDatastore_Task, 'duration_secs': 0.055787} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.378812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.379061] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1053.379315] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.379496] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.379653] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.379915] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36586ea5-93f7-4a5e-9521-558077c5f8c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.392090] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.392304] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1053.393116] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dfe485e-a4aa-406d-8a47-f8cc23f7cbbd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.399344] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1053.399344] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5213e696-3e28-8504-24f2-7fe21eb83bed" [ 1053.399344] env[70020]: _type = "Task" [ 1053.399344] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.407546] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5213e696-3e28-8504-24f2-7fe21eb83bed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.507591] env[70020]: DEBUG oslo_vmware.api [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618830, 'name': SuspendVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.687584] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.490s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.688138] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1053.692357] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.987s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.692559] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.694741] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.115s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.694979] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.697221] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.018s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.697380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.699043] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.542s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.700587] env[70020]: INFO nova.compute.claims [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.730137] env[70020]: INFO nova.scheduler.client.report [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] Deleted allocations for instance 3a4f2342-58e7-436b-a779-0fa093b52409 [ 1053.733662] env[70020]: INFO nova.scheduler.client.report [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Deleted allocations for instance 45926a02-d0fe-4274-ba47-b97b3e12e4cd [ 1053.750147] env[70020]: INFO nova.scheduler.client.report [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Deleted allocations for instance 42d20396-883d-4141-a226-61f476057cbe [ 1053.781787] env[70020]: DEBUG oslo_concurrency.lockutils [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.909914] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5213e696-3e28-8504-24f2-7fe21eb83bed, 'name': SearchDatastore_Task, 'duration_secs': 0.017053} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.910806] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c5e6ff-5061-4a3a-9414-1f39e00472f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.917255] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1053.917255] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528b98dd-285e-0b83-c4c4-1c8afee7fe2e" [ 1053.917255] env[70020]: _type = "Task" [ 1053.917255] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.924889] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528b98dd-285e-0b83-c4c4-1c8afee7fe2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.008226] env[70020]: DEBUG oslo_vmware.api [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618830, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.068933] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1054.069147] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.069309] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1054.069493] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.069637] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1054.069780] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1054.070034] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1054.070200] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1054.070364] 
env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1054.070521] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1054.070729] env[70020]: DEBUG nova.virt.hardware [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1054.071636] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e196d8f-43a8-41d6-acfa-724f7e67022f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.079284] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6751523d-a0c6-4951-bcfb-d69ef34baf43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.093307] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:48:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12ace8f2-7b35-437a-aba2-e371201f3343', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1054.100834] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.101081] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1054.101289] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c3ea59d-f778-4ca3-8143-2bf7fc489171 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.120336] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1054.120336] env[70020]: value = "task-3618831" [ 1054.120336] env[70020]: _type = "Task" [ 1054.120336] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.127432] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618831, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.205152] env[70020]: DEBUG nova.compute.utils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1054.210183] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1054.210183] env[70020]: DEBUG nova.network.neutron [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1054.244148] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2426b0f8-dc96-48a5-98aa-7060541b5428 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "3a4f2342-58e7-436b-a779-0fa093b52409" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.247s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.245142] env[70020]: DEBUG oslo_concurrency.lockutils [None req-314b173b-5156-495b-9ef3-9b4340fa6fdb tempest-ListServerFiltersTestJSON-1686378133 tempest-ListServerFiltersTestJSON-1686378133-project-member] Lock "45926a02-d0fe-4274-ba47-b97b3e12e4cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.847s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.247251] env[70020]: DEBUG nova.policy [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1a38841c8ef4fd48d07934b3dcd08fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '945e9ff05aef4b8eb2b6376b62015464', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1054.257145] env[70020]: DEBUG oslo_concurrency.lockutils [None req-50b00f0f-67d6-4a9f-b5c4-a1e2f49d5cab tempest-ImagesTestJSON-851757871 tempest-ImagesTestJSON-851757871-project-member] Lock "42d20396-883d-4141-a226-61f476057cbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.120s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.430878] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 
tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528b98dd-285e-0b83-c4c4-1c8afee7fe2e, 'name': SearchDatastore_Task, 'duration_secs': 0.03342} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.430878] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.430878] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1054.430878] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0dbc268-6fe8-48f0-9f04-41786cff8ef7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.437020] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1054.437020] env[70020]: value = "task-3618832" [ 1054.437020] env[70020]: _type = "Task" [ 1054.437020] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.443614] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618832, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.501022] env[70020]: DEBUG nova.network.neutron [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Successfully created port: 658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1054.511988] env[70020]: DEBUG oslo_vmware.api [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618830, 'name': SuspendVM_Task, 'duration_secs': 1.263743} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.512940] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Suspended the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1054.513220] env[70020]: DEBUG nova.compute.manager [None req-f74067d6-62b5-498d-9252-b4945d874d47 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.514694] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8df676-98e5-4615-8f6e-764dce4d2306 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.634070] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618831, 'name': CreateVM_Task, 'duration_secs': 0.498147} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.634588] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1054.635752] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.636247] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.637121] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1054.637687] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fcfbe61-15de-4344-ae3e-e9d8681314c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.648040] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1054.648040] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52990a65-2c3b-4a77-9922-9161d0668198" [ 1054.648040] env[70020]: _type = "Task" [ 1054.648040] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.666953] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52990a65-2c3b-4a77-9922-9161d0668198, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.715863] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1054.950230] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618832, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507397} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.952425] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1054.952963] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.958085] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c5f9596-d8a0-4202-864a-0d0b25dc128a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.964024] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1054.964024] env[70020]: value = "task-3618833" [ 1054.964024] env[70020]: _type = "Task" [ 1054.964024] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.973164] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.060832] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2107e4-cad7-422a-9f01-9cb235798394 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.070622] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1a70f9-3f17-45c0-b121-da9e86fe4af0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.111401] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b63b30-e2d1-41f2-bc76-8ba58690289a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.119528] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31951ac3-7d93-4f07-8bac-8e15c06e206c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.133010] env[70020]: DEBUG nova.compute.provider_tree [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.156186] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52990a65-2c3b-4a77-9922-9161d0668198, 'name': SearchDatastore_Task, 'duration_secs': 0.062362} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.156186] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.156186] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1055.156186] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.156427] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.156427] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1055.156656] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cec3ffe2-13c2-491a-83e7-cbead6c6427d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.165368] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1055.165603] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1055.166390] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4213104-cdfc-4ead-ac69-fbfd265e8daf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.171328] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1055.171328] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52278729-495d-4d55-164c-d7d83dff9dc1" [ 1055.171328] env[70020]: _type = "Task" [ 1055.171328] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.179782] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52278729-495d-4d55-164c-d7d83dff9dc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.328318] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.328927] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c503392f-87bf-4774-8cbc-8f468cb4807d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.336678] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1055.336678] env[70020]: value = "task-3618834" [ 1055.336678] env[70020]: _type = "Task" [ 1055.336678] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.345423] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618834, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.456801] env[70020]: DEBUG nova.objects.instance [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lazy-loading 'flavor' on Instance uuid 3dedfa48-0839-462e-8c32-ba5252f07ac0 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.475112] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074051} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.475404] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.476294] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04dee58-973a-4a99-9d8d-967fbe3269d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.499252] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.500030] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5eee4b6-91f5-4849-9ab8-1dfd43859291 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.520667] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1055.520667] env[70020]: value = "task-3618835" [ 1055.520667] env[70020]: _type = "Task" [ 1055.520667] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.528678] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618835, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.636207] env[70020]: DEBUG nova.scheduler.client.report [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1055.681905] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52278729-495d-4d55-164c-d7d83dff9dc1, 'name': SearchDatastore_Task, 'duration_secs': 0.013258} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.682731] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5131e753-d386-4a16-bdf7-67930081c103 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.688942] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1055.688942] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5231c5af-d227-6bcc-7f1f-9d997f0fcbee" [ 1055.688942] env[70020]: _type = "Task" [ 1055.688942] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.697184] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5231c5af-d227-6bcc-7f1f-9d997f0fcbee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.714904] env[70020]: DEBUG nova.compute.manager [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1055.725321] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1055.751811] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1055.752171] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.752381] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1055.752687] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.752946] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1055.753234] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1055.753473] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1055.753643] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1055.753821] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 
tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1055.754086] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1055.754183] env[70020]: DEBUG nova.virt.hardware [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1055.755155] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8151cc64-4297-478f-9ff8-34d772097cdd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.767948] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9a7f2a-7f66-4871-bf60-4088c111ec84 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.849551] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618834, 'name': PowerOffVM_Task, 'duration_secs': 0.176763} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.850276] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.851992] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade3d022-74f3-4ffa-a75a-56d0bb1c56e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.877059] env[70020]: INFO nova.compute.manager [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Resuming [ 1055.877150] env[70020]: DEBUG nova.objects.instance [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lazy-loading 'flavor' on Instance uuid 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.882218] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bd6ab1-fac3-495e-8ea8-a2ddd1da6a92 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.931512] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 
tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.931790] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3431fe7-0676-41df-9af8-42b7851e8336 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.939152] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1055.939152] env[70020]: value = "task-3618836" [ 1055.939152] env[70020]: _type = "Task" [ 1055.939152] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.952112] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618836, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.964458] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.964458] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.030908] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618835, 'name': ReconfigVM_Task, 'duration_secs': 0.289422} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.031282] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1056.031896] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af4ee3f3-f9bd-43ac-8fbc-a6fec8984da7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.038169] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1056.038169] env[70020]: value = "task-3618837" [ 1056.038169] env[70020]: _type = "Task" [ 1056.038169] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.046711] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618837, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.143274] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.143838] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1056.147516] env[70020]: DEBUG nova.compute.manager [req-3852ef34-d2be-4689-b516-432e33fbe3bd req-75a8d86d-335d-442c-b327-e406d66e3f54 service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Received event network-vif-plugged-658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.147716] env[70020]: DEBUG oslo_concurrency.lockutils [req-3852ef34-d2be-4689-b516-432e33fbe3bd req-75a8d86d-335d-442c-b327-e406d66e3f54 service nova] Acquiring lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.147919] env[70020]: DEBUG oslo_concurrency.lockutils [req-3852ef34-d2be-4689-b516-432e33fbe3bd req-75a8d86d-335d-442c-b327-e406d66e3f54 service nova] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.148107] env[70020]: DEBUG oslo_concurrency.lockutils [req-3852ef34-d2be-4689-b516-432e33fbe3bd req-75a8d86d-335d-442c-b327-e406d66e3f54 service nova] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.148285] env[70020]: DEBUG nova.compute.manager [req-3852ef34-d2be-4689-b516-432e33fbe3bd req-75a8d86d-335d-442c-b327-e406d66e3f54 service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] No waiting events found dispatching network-vif-plugged-658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1056.148446] env[70020]: WARNING nova.compute.manager [req-3852ef34-d2be-4689-b516-432e33fbe3bd req-75a8d86d-335d-442c-b327-e406d66e3f54 service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Received unexpected event network-vif-plugged-658a9776-9b7b-4d90-86b3-79a86023c519 for instance with vm_state building and task_state spawning. [ 1056.148967] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.203s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.150281] env[70020]: INFO nova.compute.claims [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.199432] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5231c5af-d227-6bcc-7f1f-9d997f0fcbee, 'name': SearchDatastore_Task, 'duration_secs': 0.018841} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.199824] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.200192] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1056.200509] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef138b13-4058-4968-9dab-1178f49a12ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.209265] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1056.209265] env[70020]: value = "task-3618838" [ 1056.209265] env[70020]: _type = "Task" [ 1056.209265] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.218176] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618838, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.239763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.426951] env[70020]: DEBUG nova.network.neutron [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Successfully updated port: 658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1056.450400] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1056.450521] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1056.450694] env[70020]: DEBUG oslo_concurrency.lockutils [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.450845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.451031] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1056.451287] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2c13fdb-f863-48b0-9972-71a07aad0444 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.469411] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1056.469600] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1056.471689] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6cae284-a5a3-4548-b688-5a63341a6a27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.477675] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1056.477675] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a857f3-1ef9-2b8c-7c48-457d6f36dfe0" [ 1056.477675] env[70020]: _type = "Task" [ 1056.477675] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.487324] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a857f3-1ef9-2b8c-7c48-457d6f36dfe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.548316] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618837, 'name': Rename_Task, 'duration_secs': 0.151985} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.548688] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1056.549239] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa6046b4-3cfe-4212-940f-2ebe269d9b96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.557078] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1056.557078] env[70020]: value = "task-3618839" [ 1056.557078] env[70020]: _type = "Task" [ 1056.557078] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.564732] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618839, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.651040] env[70020]: DEBUG nova.compute.utils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1056.652823] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1056.652931] env[70020]: DEBUG nova.network.neutron [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1056.722367] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618838, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.764490] env[70020]: DEBUG nova.policy [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1056.810265] env[70020]: DEBUG nova.network.neutron [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.930075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "refresh_cache-97fe6c57-03de-4cf8-a990-ff4f88db6cd7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.930075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquired lock "refresh_cache-97fe6c57-03de-4cf8-a990-ff4f88db6cd7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.930882] env[70020]: DEBUG nova.network.neutron [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 
tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.994327] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a857f3-1ef9-2b8c-7c48-457d6f36dfe0, 'name': SearchDatastore_Task, 'duration_secs': 0.020537} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.995849] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9424c75f-a351-4e44-88a7-34288d45976c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.004951] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1057.004951] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d1f302-c78a-6dea-cc26-26907bf78e11" [ 1057.004951] env[70020]: _type = "Task" [ 1057.004951] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.015819] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d1f302-c78a-6dea-cc26-26907bf78e11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.070868] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618839, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.156621] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1057.221368] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618838, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.846604} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.227979] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1057.232018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1057.232018] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6bf4861-d42b-49d1-8368-f5020bcf46a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.236208] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1057.236208] env[70020]: value = "task-3618840" [ 1057.236208] env[70020]: _type = "Task" [ 1057.236208] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.248522] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618840, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.398254] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.399172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquired lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.399434] env[70020]: DEBUG nova.network.neutron [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1057.460557] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ca2524-eb09-4e6e-9c99-4110d03d57e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.470644] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391dc38e-4136-48fc-a9c3-b8fed63401e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.475289] env[70020]: DEBUG nova.network.neutron [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1057.519613] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b990f068-8bda-48d2-abdd-7b59870b7c65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.531572] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee34de14-bf61-49db-a3ea-f0fe8db8113d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.535662] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d1f302-c78a-6dea-cc26-26907bf78e11, 'name': SearchDatastore_Task, 'duration_secs': 0.023149} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.535931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.536216] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. {{(pid=70020) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1057.536787] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3dccd9c-6db0-4947-a091-8d03d1ae593e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.549138] env[70020]: DEBUG nova.compute.provider_tree [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.551923] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1057.551923] env[70020]: value = "task-3618841" [ 1057.551923] env[70020]: _type = "Task" [ 1057.551923] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.560995] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618841, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.570737] env[70020]: DEBUG oslo_vmware.api [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618839, 'name': PowerOnVM_Task, 'duration_secs': 0.606143} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.570988] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.571227] env[70020]: INFO nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Took 5.34 seconds to spawn the instance on the hypervisor. [ 1057.571370] env[70020]: DEBUG nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.572123] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9065fae5-e72a-41d3-bf6e-908858ce4730 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.747334] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12792} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.747334] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.749638] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bdd513-28bc-4bed-848f-dfd03e7ad42c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.779517] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.783226] env[70020]: DEBUG nova.network.neutron [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Updating instance_info_cache with network_info: [{"id": "658a9776-9b7b-4d90-86b3-79a86023c519", "address": "fa:16:3e:f3:15:80", "network": {"id": "4542a3f3-b593-414b-8cca-7a8efe33d43b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1171548632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "945e9ff05aef4b8eb2b6376b62015464", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap658a9776-9b", "ovs_interfaceid": "658a9776-9b7b-4d90-86b3-79a86023c519", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.786209] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32e503ab-8e9a-4d65-9b7b-9be81a6682e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.807363] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1057.807363] env[70020]: value = "task-3618842" [ 1057.807363] env[70020]: _type = "Task" [ 1057.807363] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.818643] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618842, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.885742] env[70020]: DEBUG nova.network.neutron [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Successfully created port: 179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.054133] env[70020]: DEBUG nova.scheduler.client.report [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.075217] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618841, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.094058] env[70020]: INFO nova.compute.manager [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Took 44.20 seconds to build instance. [ 1058.141823] env[70020]: DEBUG nova.network.neutron [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.173780] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1058.213123] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1058.213397] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.213553] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1058.213732] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.213878] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1058.214041] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1058.214263] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1058.214438] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1058.214594] 
env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1058.214761] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1058.214951] env[70020]: DEBUG nova.virt.hardware [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1058.217878] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f481171-2f9e-4faf-a849-b4236fd43aba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.228582] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495d1a49-69c5-4347-836f-92a7a22ca4dc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.301242] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Releasing lock "refresh_cache-97fe6c57-03de-4cf8-a990-ff4f88db6cd7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.301635] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Instance network_info: |[{"id": "658a9776-9b7b-4d90-86b3-79a86023c519", "address": "fa:16:3e:f3:15:80", "network": {"id": "4542a3f3-b593-414b-8cca-7a8efe33d43b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1171548632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "945e9ff05aef4b8eb2b6376b62015464", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap658a9776-9b", "ovs_interfaceid": "658a9776-9b7b-4d90-86b3-79a86023c519", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1058.302109] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None 
req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:15:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd986680e-ad16-45b1-bf6d-cd2fe661679f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '658a9776-9b7b-4d90-86b3-79a86023c519', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1058.311288] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Creating folder: Project (945e9ff05aef4b8eb2b6376b62015464). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1058.311628] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28c26a37-345b-4270-b62a-f2ead81920c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.326935] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618842, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.329662] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Created folder: Project (945e9ff05aef4b8eb2b6376b62015464) in parent group-v721521. [ 1058.329858] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Creating folder: Instances. Parent ref: group-v721783. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1058.330120] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bacf02d0-4ba4-4463-95ec-efd14dd81047 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.342551] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Created folder: Instances in parent group-v721783. [ 1058.342818] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1058.343035] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1058.343255] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c765b6b-41c2-4c14-a287-df0c34d17522 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.376023] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1058.376023] env[70020]: value = "task-3618845" [ 1058.376023] env[70020]: _type = "Task" [ 1058.376023] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.385370] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618845, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.395280] env[70020]: DEBUG nova.compute.manager [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Received event network-changed-658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.397396] env[70020]: DEBUG nova.compute.manager [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Refreshing instance network info cache due to event network-changed-658a9776-9b7b-4d90-86b3-79a86023c519. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.397396] env[70020]: DEBUG oslo_concurrency.lockutils [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] Acquiring lock "refresh_cache-97fe6c57-03de-4cf8-a990-ff4f88db6cd7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.397396] env[70020]: DEBUG oslo_concurrency.lockutils [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] Acquired lock "refresh_cache-97fe6c57-03de-4cf8-a990-ff4f88db6cd7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.397396] env[70020]: DEBUG nova.network.neutron [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Refreshing network info cache for port 658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.438563] env[70020]: DEBUG nova.network.neutron [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [{"id": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "address": "fa:16:3e:13:c0:92", "network": {"id": "1d8737b3-4820-4ad1-8d76-9ab1a7db867e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2006556938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a63e8bb4fcd844f69aaeade95326a91b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cf3b73-bb", "ovs_interfaceid": "52cf3b73-bbee-4e96-91f2-a1caa2041501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.569141] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.569600] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1058.578353] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.774s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.578550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.580512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.253s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.582828] env[70020]: INFO nova.compute.claims [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1058.586018] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618841, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702744} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.586018] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. 
[ 1058.586633] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da3a3c6-3ca2-4a53-81ee-67fe04619edf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.595988] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4e05fb98-36f5-4c59-9f51-8e1ecec9d465 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "1ddd5a29-075b-482a-a6e9-4c7345673a00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.726s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.619347] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1058.619347] env[70020]: INFO nova.scheduler.client.report [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocations for instance 056141e3-5628-4451-bd25-f4fa15edd11e [ 1058.621302] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cd5a498-f0a6-43de-8edb-4786ca9842c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.647487] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.647487] env[70020]: DEBUG nova.compute.manager [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Inject network info {{(pid=70020) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1058.647487] env[70020]: DEBUG nova.compute.manager [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] network_info to inject: |[{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1058.654793] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Reconfiguring VM instance to set the machine id {{(pid=70020) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1058.656866] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26ff3a2d-fcba-4497-bb8c-56cae3e374a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.669739] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1058.669739] env[70020]: value = "task-3618846" [ 1058.669739] env[70020]: _type = "Task" [ 1058.669739] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.677627] env[70020]: DEBUG oslo_vmware.api [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1058.677627] env[70020]: value = "task-3618847" [ 1058.677627] env[70020]: _type = "Task" [ 1058.677627] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.681252] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618846, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.692933] env[70020]: DEBUG oslo_vmware.api [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618847, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.825726] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618842, 'name': ReconfigVM_Task, 'duration_secs': 0.599093} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.825726] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83/04de1a07-cf38-41e0-be96-237bbe1ead83.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.826425] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89391639-bcab-42eb-8c77-61da2c1df1e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.837808] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1058.837808] env[70020]: value = "task-3618848" [ 1058.837808] env[70020]: _type = "Task" [ 1058.837808] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.847987] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618848, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.886525] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618845, 'name': CreateVM_Task, 'duration_secs': 0.438812} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.886740] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1058.887575] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.887770] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.888333] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1058.888481] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78bf11da-9c63-4029-be37-c1ed3bb40a95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.895606] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1058.895606] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52561f35-7308-83f0-7b2f-f7985eb774dc" [ 1058.895606] env[70020]: _type = "Task" [ 1058.895606] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.909682] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52561f35-7308-83f0-7b2f-f7985eb774dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.942013] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Releasing lock "refresh_cache-2198e7f8-5458-4b97-abb3-0a3c932cebc2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.943130] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c331cfb-8bb2-4aa3-b29b-8aa654a8933a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.951214] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Resuming the VM {{(pid=70020) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1058.951485] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5596552f-4c5e-440b-a4e5-e0028e3e80e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.959689] env[70020]: DEBUG oslo_vmware.api [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1058.959689] env[70020]: value = "task-3618849" [ 1058.959689] env[70020]: _type = "Task" [ 1058.959689] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.972495] env[70020]: DEBUG oslo_vmware.api [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.080441] env[70020]: DEBUG nova.compute.utils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1059.081778] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1059.081980] env[70020]: DEBUG nova.network.neutron [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1059.087739] env[70020]: INFO nova.compute.manager [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Rebuilding instance [ 1059.147539] env[70020]: DEBUG nova.compute.manager [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.148428] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4455dfc5-6e8e-4d77-bb0f-8f2dd2468a0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.153629] env[70020]: DEBUG nova.policy [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1059.161475] env[70020]: DEBUG oslo_concurrency.lockutils [None req-678c186b-0ee3-42e9-a900-e9149e2aecba tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "056141e3-5628-4451-bd25-f4fa15edd11e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.249s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.187684] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618846, 'name': ReconfigVM_Task, 'duration_secs': 0.375347} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.194040] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Reconfigured VM instance instance-0000005c to attach disk [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1059.195914] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f427b2-ea27-4440-8f7e-eee48932e687 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.222958] env[70020]: DEBUG oslo_vmware.api [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618847, 'name': ReconfigVM_Task, 'duration_secs': 0.220272} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.229724] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0b2ba1-b1e1-441c-a972-aa20b1a54e58 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Reconfigured VM instance to set the machine id {{(pid=70020) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1059.232383] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fd2d631-d74d-4fdd-8d31-1fd7bf5ae418 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.252790] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1059.252790] env[70020]: value = "task-3618850" [ 1059.252790] env[70020]: _type = "Task" [ 1059.252790] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.263781] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618850, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.294732] env[70020]: DEBUG nova.objects.instance [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lazy-loading 'flavor' on Instance uuid 3dedfa48-0839-462e-8c32-ba5252f07ac0 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.310845] env[70020]: DEBUG nova.network.neutron [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Updated VIF entry in instance network info cache for port 658a9776-9b7b-4d90-86b3-79a86023c519. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.316019] env[70020]: DEBUG nova.network.neutron [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Updating instance_info_cache with network_info: [{"id": "658a9776-9b7b-4d90-86b3-79a86023c519", "address": "fa:16:3e:f3:15:80", "network": {"id": "4542a3f3-b593-414b-8cca-7a8efe33d43b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1171548632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "945e9ff05aef4b8eb2b6376b62015464", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d986680e-ad16-45b1-bf6d-cd2fe661679f", "external-id": "nsx-vlan-transportzone-397", "segmentation_id": 397, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap658a9776-9b", "ovs_interfaceid": "658a9776-9b7b-4d90-86b3-79a86023c519", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.350378] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618848, 'name': Rename_Task, 'duration_secs': 0.200315} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.350651] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1059.350934] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0c31317-1991-4a1e-ab7d-763d6e34fbd8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.358798] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1059.358798] env[70020]: value = "task-3618851" [ 1059.358798] env[70020]: _type = "Task" [ 1059.358798] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.368775] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618851, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.410647] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52561f35-7308-83f0-7b2f-f7985eb774dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011497} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.412020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.412020] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1059.412020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.412020] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.412020] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1059.412253] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f3d9af1-897e-4780-ba85-20f2a9201784 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.428873] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1059.429215] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1059.429976] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1da3e7c-b8d8-4aaf-90b8-0d88c78d1b51 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.439312] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1059.439312] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5283b9a8-9cc2-8d98-62b0-7e9df6181e2d" [ 1059.439312] env[70020]: _type = "Task" [ 1059.439312] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.446539] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5283b9a8-9cc2-8d98-62b0-7e9df6181e2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.471083] env[70020]: DEBUG oslo_vmware.api [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618849, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.585341] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1059.705224] env[70020]: DEBUG nova.network.neutron [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Successfully created port: 1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1059.766508] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618850, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.800716] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.800931] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.816488] env[70020]: DEBUG oslo_concurrency.lockutils [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] Releasing lock "refresh_cache-97fe6c57-03de-4cf8-a990-ff4f88db6cd7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.816862] env[70020]: DEBUG nova.compute.manager [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.817370] env[70020]: DEBUG nova.compute.manager [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing instance network info cache due to event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1059.817890] env[70020]: DEBUG oslo_concurrency.lockutils [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.879849] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618851, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.950272] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5283b9a8-9cc2-8d98-62b0-7e9df6181e2d, 'name': SearchDatastore_Task, 'duration_secs': 0.036569} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.951180] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b93ab90d-4d29-451d-82d2-2c90c799dfed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.958779] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1059.958779] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d27ee5-a94f-cf26-6651-7e581f9a5208" [ 1059.958779] env[70020]: _type = "Task" [ 1059.958779] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.977237] env[70020]: DEBUG oslo_vmware.api [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618849, 'name': PowerOnVM_Task, 'duration_secs': 0.82178} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.977436] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d27ee5-a94f-cf26-6651-7e581f9a5208, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.978445] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Resumed the VM {{(pid=70020) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1059.978445] env[70020]: DEBUG nova.compute.manager [None req-1378e2bd-d281-437a-9774-8561d2a48261 tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.978623] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e987b260-f863-4239-9102-4c58f3520af0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.078572] env[70020]: DEBUG nova.network.neutron [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Successfully updated port: 179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.097955] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8796fc-ce93-4f8a-b342-a0c9481a7edb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.104978] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6461745-3f7e-4a03-a5b9-fc7819da70c3 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.139889] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbc4800-dd64-4977-a499-7f9651c5d06d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.148847] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0630ad3e-e8b8-48fb-972b-5610d316261e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.164280] env[70020]: DEBUG nova.compute.provider_tree [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.169535] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.169840] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee759f95-8a8b-4544-90d2-1d4272174954 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.178939] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1060.178939] env[70020]: value = "task-3618852" [ 1060.178939] env[70020]: _type = "Task" [ 1060.178939] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.190373] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.263853] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618850, 'name': ReconfigVM_Task, 'duration_secs': 0.599688} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.264153] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.264407] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4796b731-ce18-4d89-961c-d0ffe1d6e277 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.272389] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1060.272389] env[70020]: value = "task-3618853" [ 1060.272389] env[70020]: _type = "Task" [ 1060.272389] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.280874] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618853, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.380729] env[70020]: DEBUG oslo_vmware.api [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618851, 'name': PowerOnVM_Task, 'duration_secs': 0.679602} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.380729] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1060.381061] env[70020]: DEBUG nova.compute.manager [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1060.381921] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8947747-d580-4500-a847-95b2f2d0a027 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.477126] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d27ee5-a94f-cf26-6651-7e581f9a5208, 'name': SearchDatastore_Task, 'duration_secs': 0.016251} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.477796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.478562] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 97fe6c57-03de-4cf8-a990-ff4f88db6cd7/97fe6c57-03de-4cf8-a990-ff4f88db6cd7.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1060.478562] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ec3cf3e-462b-425e-91bd-b4ce550e52e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.487924] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1060.487924] env[70020]: value = "task-3618854" [ 1060.487924] env[70020]: _type = "Task" [ 1060.487924] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.502754] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618854, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.584986] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.584986] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.584986] env[70020]: DEBUG nova.network.neutron [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1060.598740] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1060.634137] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1060.634418] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1060.634558] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1060.635807] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.635922] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1060.636133] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1060.636289] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1060.636453] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1060.636619] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1060.636780] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1060.636972] env[70020]: DEBUG nova.virt.hardware [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1060.638720] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80e4cf8-a216-460f-8a6f-7d1c3a209d0e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.647449] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4b22e2-0a01-4347-8894-99e85b8c6e89 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.667250] env[70020]: DEBUG nova.scheduler.client.report [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.690267] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618852, 'name': PowerOffVM_Task, 'duration_secs': 0.185665} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.691398] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.691489] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.692468] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c29050-5e12-4d88-bc33-216b567847ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.701754] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.702836] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a88f566e-7ed5-4a0f-b15d-32b22c557af1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.752025] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1060.752025] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1060.752025] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Deleting the datastore file [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.752025] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d517778-c0cc-4c97-94b4-3523b7e51e14 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.762021] env[70020]: DEBUG oslo_vmware.api [None 
req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1060.762021] env[70020]: value = "task-3618856" [ 1060.762021] env[70020]: _type = "Task" [ 1060.762021] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.775261] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.784542] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618853, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.902850] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.944827] env[70020]: DEBUG nova.network.neutron [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.003336] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618854, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.083908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.083908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.131938] env[70020]: DEBUG nova.network.neutron [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.174447] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.175061] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1061.178090] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.790s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.178354] env[70020]: DEBUG nova.objects.instance [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lazy-loading 'resources' on Instance uuid da07cb36-244f-4f48-a5b6-8d00324c1edf {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.235417] env[70020]: DEBUG nova.compute.manager [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-vif-plugged-179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.235638] env[70020]: DEBUG oslo_concurrency.lockutils [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.235908] env[70020]: DEBUG oslo_concurrency.lockutils [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.236187] env[70020]: DEBUG oslo_concurrency.lockutils [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.236298] env[70020]: DEBUG nova.compute.manager [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] No waiting events found dispatching network-vif-plugged-179ff8c1-53f9-4484-9dce-1fd85174d71d 
{{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1061.236461] env[70020]: WARNING nova.compute.manager [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received unexpected event network-vif-plugged-179ff8c1-53f9-4484-9dce-1fd85174d71d for instance with vm_state building and task_state spawning. [ 1061.236618] env[70020]: DEBUG nova.compute.manager [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-changed-179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.237071] env[70020]: DEBUG nova.compute.manager [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Refreshing instance network info cache due to event network-changed-179ff8c1-53f9-4484-9dce-1fd85174d71d. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1061.237071] env[70020]: DEBUG oslo_concurrency.lockutils [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] Acquiring lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.271324] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304064} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.271838] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.272047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1061.272229] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.283921] env[70020]: DEBUG oslo_vmware.api [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618853, 'name': PowerOnVM_Task, 'duration_secs': 0.860075} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.284169] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.287182] env[70020]: DEBUG nova.compute.manager [None req-914c5ac9-a4c2-47f6-bd7d-ae6cb39bc5c9 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1061.287591] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2a8471-63d0-4bb0-8ff5-d5cf1d1c24dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.378984] env[70020]: DEBUG nova.network.neutron [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.431971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "3163a070-a0db-4a41-af32-dfbe7a1766ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.432254] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.502837] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618854, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.864348} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.502966] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 97fe6c57-03de-4cf8-a990-ff4f88db6cd7/97fe6c57-03de-4cf8-a990-ff4f88db6cd7.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1061.503123] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1061.503358] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed7c35f6-d85e-432b-a8e6-ec796d8132ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.512711] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1061.512711] env[70020]: value = "task-3618857" [ 1061.512711] env[70020]: _type = "Task" [ 1061.512711] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.529751] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618857, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.571194] env[70020]: DEBUG nova.compute.manager [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.571194] env[70020]: DEBUG nova.compute.manager [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing instance network info cache due to event network-changed-92b2e42f-1b28-4c86-a59d-e06f2adc11da. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1061.571194] env[70020]: DEBUG oslo_concurrency.lockutils [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] Acquiring lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.586597] env[70020]: DEBUG nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1061.620148] env[70020]: DEBUG nova.network.neutron [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Successfully updated port: 1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1061.684068] env[70020]: DEBUG nova.compute.utils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1061.689094] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1061.690156] env[70020]: DEBUG nova.network.neutron [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1061.757781] env[70020]: DEBUG nova.policy [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291265cdc1164603a9011173b1457c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b060ffb3ac4ecd95dcd85d4744dc2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1061.852378] env[70020]: DEBUG nova.network.neutron [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.885572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.885938] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Instance network_info: |[{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1061.886572] env[70020]: DEBUG oslo_concurrency.lockutils [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] Acquired lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.886761] env[70020]: DEBUG nova.network.neutron [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Refreshing network info cache for 
port 179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.888140] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:53:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '179ff8c1-53f9-4484-9dce-1fd85174d71d', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.900685] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Creating folder: Project (0573da12f56f4b18a103e4e9fdfb9c19). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1061.910620] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5ff7dd8-d006-442e-8d1a-e03f2f5c3666 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.926445] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Created folder: Project (0573da12f56f4b18a103e4e9fdfb9c19) in parent group-v721521. [ 1061.926445] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Creating folder: Instances. Parent ref: group-v721786. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1061.928758] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06f6dcdb-22bb-4d30-bb74-84aa3721073c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.935029] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1061.944301] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Created folder: Instances in parent group-v721786. [ 1061.944301] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.944301] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.944301] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eadf07e8-ca2c-48b3-9ff3-929710fa6d6d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.969823] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.969823] env[70020]: value = "task-3618860" [ 1061.969823] env[70020]: _type = "Task" [ 1061.969823] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.979086] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618860, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.024283] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08463} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.027145] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.030426] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9dbc07b-d084-4fcb-a8fa-115186552eba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.056052] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 97fe6c57-03de-4cf8-a990-ff4f88db6cd7/97fe6c57-03de-4cf8-a990-ff4f88db6cd7.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.060159] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16af8386-9efd-4027-a2bf-904feaab88c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.088155] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1062.088155] env[70020]: value = "task-3618861" [ 1062.088155] env[70020]: _type = "Task" [ 1062.088155] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.107925] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618861, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.113363] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.123262] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-9d1568bf-4027-4d4c-b089-276006eee715" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.123454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-9d1568bf-4027-4d4c-b089-276006eee715" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.123612] env[70020]: DEBUG nova.network.neutron [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.127201] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dabfea-e341-44b6-ac3f-099d761c78e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.139971] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defcd68a-7a2a-473f-81f7-e3e66e35effc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.180394] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c44d2e-47ef-4b20-aa61-91b97663f3d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.189107] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1062.193087] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1408f7-c8e0-4dd4-8ecf-31daf9533733 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.209784] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1062.301440] env[70020]: DEBUG nova.network.neutron [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updated VIF entry in instance network info cache for port 179ff8c1-53f9-4484-9dce-1fd85174d71d. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.302405] env[70020]: DEBUG nova.network.neutron [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.321737] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1062.322299] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.322586] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1062.322869] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.323118] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1062.323420] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1062.323879] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1062.324179] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1062.324588] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1062.324910] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1062.325600] env[70020]: DEBUG nova.virt.hardware [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 
tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1062.326897] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645e9a98-9b3a-4cc9-80b2-ab2e6181d1b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.330961] env[70020]: DEBUG nova.network.neutron [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Successfully created port: 8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1062.340275] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4642c6-4b61-4e1b-9c96-30f088472401 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.364939] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.365670] env[70020]: DEBUG nova.compute.manager [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Inject network info {{(pid=70020) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1062.366287] env[70020]: DEBUG nova.compute.manager [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] network_info to inject: |[{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _inject_network_info 
/opt/stack/nova/nova/compute/manager.py:7738}} [ 1062.373906] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Reconfiguring VM instance to set the machine id {{(pid=70020) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1062.377025] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.381299] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.381781] env[70020]: DEBUG oslo_concurrency.lockutils [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.382115] env[70020]: DEBUG nova.network.neutron [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.384203] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4bd86ed-bbc5-44ec-aa05-5e87256a7514 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.397008] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.400504] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2972c133-f82b-47a7-8b36-2f5a9a830f2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.420427] env[70020]: DEBUG oslo_vmware.api [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1062.420427] env[70020]: value = "task-3618863" [ 1062.420427] env[70020]: _type = "Task" [ 1062.420427] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.421786] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.421786] env[70020]: value = "task-3618862" [ 1062.421786] env[70020]: _type = "Task" [ 1062.421786] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.434287] env[70020]: DEBUG oslo_vmware.api [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618863, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.437576] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618862, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.461751] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.485457] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618860, 'name': CreateVM_Task, 'duration_secs': 0.459408} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.485457] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.486532] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.486773] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.487295] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1062.487487] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c61959f-6b40-49ca-83e2-180cc8dca368 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.493992] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1062.493992] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5292bd45-6f8e-94ae-615e-c90abb7c5f81" [ 1062.493992] env[70020]: _type = "Task" [ 1062.493992] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.507265] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5292bd45-6f8e-94ae-615e-c90abb7c5f81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.608008] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.749561] env[70020]: ERROR nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [req-128eb941-aac2-46b8-8084-33cd47fe8062] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-128eb941-aac2-46b8-8084-33cd47fe8062"}]} [ 1062.772838] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1062.796319] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1062.796821] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1062.799149] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "04de1a07-cf38-41e0-be96-237bbe1ead83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.799396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.799602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "04de1a07-cf38-41e0-be96-237bbe1ead83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.799783] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.799987] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.804700] env[70020]: INFO nova.compute.manager [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Terminating instance [ 1062.807122] env[70020]: DEBUG oslo_concurrency.lockutils [req-fec0d369-bd9b-4795-974e-3426c7866c22 req-b882b177-b859-40be-96b8-d1a72dff007e service nova] Releasing lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.819077] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1062.842285] 
env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1062.894796] env[70020]: DEBUG nova.network.neutron [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1062.912701] env[70020]: DEBUG nova.network.neutron [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updated VIF entry in instance network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.912701] env[70020]: DEBUG nova.network.neutron [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.944874] env[70020]: DEBUG oslo_vmware.api [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618863, 'name': ReconfigVM_Task, 'duration_secs': 0.186573} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.948427] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed91f8-c8f1-4bc3-bf46-763d90361de3 tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Reconfigured VM instance to set the machine id {{(pid=70020) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1062.951795] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618862, 'name': CreateVM_Task, 'duration_secs': 0.346921} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.955728] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.955728] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.012649] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5292bd45-6f8e-94ae-615e-c90abb7c5f81, 'name': SearchDatastore_Task, 'duration_secs': 0.018072} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.013203] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.013619] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.013978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.014347] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1063.017870] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.017870] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.017870] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1063.017870] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dde6bb9b-41f2-43b2-9f83-06abaa81f9cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.020446] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bda7a2c4-108e-49b1-bc7a-5121488fdcc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.030618] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1063.030618] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5211419c-d89b-d666-c7aa-312dab0151d1" [ 1063.030618] env[70020]: _type = "Task" [ 1063.030618] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.034824] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.035052] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.036538] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c94572d2-c35f-45d9-bbb8-3283e205ff1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.043508] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5211419c-d89b-d666-c7aa-312dab0151d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.050702] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1063.050702] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52921711-f11f-125e-bea8-058c60bd28a6" [ 1063.050702] env[70020]: _type = "Task" [ 1063.050702] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.060339] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52921711-f11f-125e-bea8-058c60bd28a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.105016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "3dedfa48-0839-462e-8c32-ba5252f07ac0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.105016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.105016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "3dedfa48-0839-462e-8c32-ba5252f07ac0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.105016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.105016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.105016] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 
tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618861, 'name': ReconfigVM_Task, 'duration_secs': 0.678073} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.105357] env[70020]: INFO nova.compute.manager [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Terminating instance [ 1063.106528] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 97fe6c57-03de-4cf8-a990-ff4f88db6cd7/97fe6c57-03de-4cf8-a990-ff4f88db6cd7.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.110558] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92955dae-9771-4c3a-b0b5-823676af47f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.119434] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1063.119434] env[70020]: value = "task-3618864" [ 1063.119434] env[70020]: _type = "Task" [ 1063.119434] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.132162] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618864, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.202293] env[70020]: DEBUG nova.network.neutron [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Updating instance_info_cache with network_info: [{"id": "1b623703-5d19-4e24-b8aa-5b76192f92f5", "address": "fa:16:3e:12:a0:56", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b623703-5d", "ovs_interfaceid": "1b623703-5d19-4e24-b8aa-5b76192f92f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.206154] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1063.246156] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1063.246359] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.246529] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1063.246714] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.246871] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1063.247013] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1063.248608] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1063.248794] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1063.248971] env[70020]: DEBUG 
nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1063.249174] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1063.249381] env[70020]: DEBUG nova.virt.hardware [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1063.250287] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f26c82-1f11-40c1-8785-3204e3dfe133 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.265850] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1a4fa0-eb93-4692-92ed-0af206e3263e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.300572] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39837700-7663-44f9-b711-dacc5c6376c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.309104] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1653aff-1f50-449c-a316-0add9a403493 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.313193] env[70020]: DEBUG nova.compute.manager [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1063.313406] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.314355] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2754b0-468d-41b9-8396-af6e9c827ff3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.353370] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.353912] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58c33de5-b73b-4e79-90a0-f7b2f50bc462 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.356504] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a3432d-2963-4314-8334-d953e4bfa1eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.369780] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5576087-c5dd-419d-98d5-d16f08acbca2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.375382] env[70020]: DEBUG oslo_vmware.api [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1063.375382] env[70020]: value = "task-3618865" [ 1063.375382] env[70020]: _type = "Task" [ 1063.375382] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.389148] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1063.396446] env[70020]: DEBUG oslo_vmware.api [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618865, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.413349] env[70020]: DEBUG oslo_concurrency.lockutils [req-ac050caf-b292-4145-8391-7369839995cb req-633ea784-a8f3-442b-a260-abc5d224480c service nova] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.413898] env[70020]: DEBUG oslo_concurrency.lockutils [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] Acquired lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.414151] env[70020]: DEBUG nova.network.neutron [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Refreshing network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.450124] env[70020]: DEBUG nova.compute.manager [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Received event network-vif-plugged-1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.450339] env[70020]: DEBUG oslo_concurrency.lockutils [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] Acquiring lock "9d1568bf-4027-4d4c-b089-276006eee715-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.450537] env[70020]: DEBUG oslo_concurrency.lockutils [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] Lock "9d1568bf-4027-4d4c-b089-276006eee715-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.450697] env[70020]: DEBUG oslo_concurrency.lockutils [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] Lock "9d1568bf-4027-4d4c-b089-276006eee715-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.451077] env[70020]: DEBUG nova.compute.manager [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] No waiting events found dispatching network-vif-plugged-1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.451353] env[70020]: WARNING nova.compute.manager [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Received unexpected event network-vif-plugged-1b623703-5d19-4e24-b8aa-5b76192f92f5 for instance with vm_state building and task_state spawning. 
[ 1063.451561] env[70020]: DEBUG nova.compute.manager [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Received event network-changed-1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.451806] env[70020]: DEBUG nova.compute.manager [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Refreshing instance network info cache due to event network-changed-1b623703-5d19-4e24-b8aa-5b76192f92f5. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1063.452050] env[70020]: DEBUG oslo_concurrency.lockutils [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] Acquiring lock "refresh_cache-9d1568bf-4027-4d4c-b089-276006eee715" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.544646] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5211419c-d89b-d666-c7aa-312dab0151d1, 'name': SearchDatastore_Task, 'duration_secs': 0.022806} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.545031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.545286] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.545530] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.563189] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52921711-f11f-125e-bea8-058c60bd28a6, 'name': SearchDatastore_Task, 'duration_secs': 0.012181} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.564083] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8f594a7-fd2d-49b8-8e96-e653e2485866 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.572587] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1063.572587] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d39a59-9dd5-34eb-3ba7-fbcf04625f5d" [ 1063.572587] env[70020]: _type = "Task" [ 1063.572587] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.589694] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d39a59-9dd5-34eb-3ba7-fbcf04625f5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.614344] env[70020]: DEBUG nova.compute.manager [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1063.614516] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.615470] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c090dc53-ee92-4182-926e-9d3c94195519 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.625237] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.625925] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-013e21e4-7836-4f02-91f8-21022ef25e2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.631054] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618864, 'name': Rename_Task, 'duration_secs': 0.164629} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.631326] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.631566] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebf75b04-1677-4718-af3b-3be45d0da91c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.634299] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1063.634299] env[70020]: value = "task-3618866" [ 1063.634299] env[70020]: _type = "Task" [ 1063.634299] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.639614] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1063.639614] env[70020]: value = "task-3618867" [ 1063.639614] env[70020]: _type = "Task" [ 1063.639614] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.649349] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618867, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.652780] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618866, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.705019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-9d1568bf-4027-4d4c-b089-276006eee715" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.705827] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Instance network_info: |[{"id": "1b623703-5d19-4e24-b8aa-5b76192f92f5", "address": "fa:16:3e:12:a0:56", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b623703-5d", "ovs_interfaceid": "1b623703-5d19-4e24-b8aa-5b76192f92f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1063.706199] env[70020]: DEBUG oslo_concurrency.lockutils [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] Acquired lock "refresh_cache-9d1568bf-4027-4d4c-b089-276006eee715" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.706351] env[70020]: DEBUG nova.network.neutron [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Refreshing network info cache for port 1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.707631] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:a0:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b623703-5d19-4e24-b8aa-5b76192f92f5', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1063.716363] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1063.718273] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1063.721046] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e63804ce-cb87-4ab3-8edb-dbfe0dd5f6dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.737316] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "f1a09304-7725-489a-8669-322a51c709e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.737576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "f1a09304-7725-489a-8669-322a51c709e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.749051] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1063.749051] env[70020]: value = "task-3618868" [ 1063.749051] env[70020]: _type = "Task" [ 1063.749051] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.760191] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618868, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.889648] env[70020]: DEBUG oslo_vmware.api [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618865, 'name': PowerOffVM_Task, 'duration_secs': 0.235956} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.889980] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1063.890088] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1063.890338] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bff3f7ef-3fae-4561-b1a5-6a30ec1efeb0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.915624] env[70020]: ERROR nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [req-47c99ce5-83ae-4649-b4fd-e74500e1aa85] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-47c99ce5-83ae-4649-b4fd-e74500e1aa85"}]} [ 1063.935909] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1063.952161] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1063.952454] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1063.967910] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1063.990020] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1063.990412] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1063.990652] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file 
[datastore1] 04de1a07-cf38-41e0-be96-237bbe1ead83 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1063.990955] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cc5d28c-879c-4e33-a4f6-f8b67461b199 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.999824] env[70020]: DEBUG oslo_vmware.api [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1063.999824] env[70020]: value = "task-3618870" [ 1063.999824] env[70020]: _type = "Task" [ 1063.999824] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.000930] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1064.015476] env[70020]: DEBUG oslo_vmware.api [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.088385] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d39a59-9dd5-34eb-3ba7-fbcf04625f5d, 'name': SearchDatastore_Task, 'duration_secs': 0.014036} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.091300] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.091623] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c972e083-8c91-4875-a8c6-8257b06c93a1/c972e083-8c91-4875-a8c6-8257b06c93a1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.095163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.095163] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.095163] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a89491fe-48ff-461b-8e50-55563f2355e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.097319] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a22a51ad-504c-4c73-9ccb-9dc365827c86 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.107467] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1064.107467] env[70020]: value = "task-3618871" [ 1064.107467] env[70020]: _type = "Task" [ 1064.107467] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.115833] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.116054] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1064.118054] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a1a2875-77c2-447a-ab2e-715697a3b382 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.128269] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618871, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.134939] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1064.134939] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ef8251-eb2d-b799-0c2f-5b44981bc3b1" [ 1064.134939] env[70020]: _type = "Task" [ 1064.134939] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.167176] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618866, 'name': PowerOffVM_Task, 'duration_secs': 0.195088} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.168271] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618867, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.171187] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.171187] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.171187] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ef8251-eb2d-b799-0c2f-5b44981bc3b1, 'name': SearchDatastore_Task, 'duration_secs': 0.020117} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.171377] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e4b12df-5639-4eb9-bc8d-64f92cce19e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.177159] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2902590a-4aa9-4aad-ab5e-a2f2da9dc6b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.191961] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1064.191961] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528e21db-ab68-e9eb-c2f2-380da9f6ff83" [ 1064.191961] env[70020]: _type = "Task" [ 1064.191961] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.200196] env[70020]: INFO nova.compute.manager [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Unrescuing [ 1064.200196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.200196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.200196] env[70020]: DEBUG nova.network.neutron [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.207229] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528e21db-ab68-e9eb-c2f2-380da9f6ff83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.241562] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1064.260279] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618868, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.278741] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.278936] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.279138] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Deleting the datastore file [datastore1] 3dedfa48-0839-462e-8c32-ba5252f07ac0 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.279426] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-475a1ff2-f1dd-454e-a4de-0925ea8d3f6a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.289536] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for the task: (returnval){ [ 1064.289536] env[70020]: value = "task-3618873" [ 1064.289536] env[70020]: _type = "Task" [ 1064.289536] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.302358] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618873, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.317063] env[70020]: DEBUG nova.network.neutron [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updated VIF entry in instance network info cache for port 92b2e42f-1b28-4c86-a59d-e06f2adc11da. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1064.317457] env[70020]: DEBUG nova.network.neutron [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [{"id": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "address": "fa:16:3e:f2:cc:c3", "network": {"id": "1f03733c-ef7b-4bed-8ce6-b27357cd0d83", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1581314653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7252df4458bb4a1283a419877e101bf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e272539-d425-489f-9a63-aba692e88933", "external-id": "nsx-vlan-transportzone-869", "segmentation_id": 869, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2e42f-1b", "ovs_interfaceid": "92b2e42f-1b28-4c86-a59d-e06f2adc11da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.436223] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e4700e-c27f-4a73-8254-7015be14f231 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.444577] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5883689-5983-4370-ba6b-6abdcef706f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.490956] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d419ca4-798d-4fdb-a7e7-585479bf2de3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.494708] env[70020]: DEBUG nova.network.neutron [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Successfully updated port: 8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1064.506579] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fa30fa-a2b4-46cf-b960-0cd83fe341b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.560967] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1064.563149] env[70020]: DEBUG oslo_vmware.api [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34593} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.567301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "db24c4e0-f778-4488-b9cb-a06b21932b4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.567650] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.569954] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.570278] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.570742] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.570892] env[70020]: INFO nova.compute.manager [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1064.571307] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1064.572126] env[70020]: DEBUG nova.compute.manager [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1064.572289] env[70020]: DEBUG nova.network.neutron [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1064.622681] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618871, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.646151] env[70020]: DEBUG nova.network.neutron [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Updated VIF entry in instance network info cache for port 1b623703-5d19-4e24-b8aa-5b76192f92f5. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1064.646804] env[70020]: DEBUG nova.network.neutron [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Updating instance_info_cache with network_info: [{"id": "1b623703-5d19-4e24-b8aa-5b76192f92f5", "address": "fa:16:3e:12:a0:56", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b623703-5d", "ovs_interfaceid": "1b623703-5d19-4e24-b8aa-5b76192f92f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.668950] env[70020]: DEBUG oslo_vmware.api [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618867, 'name': PowerOnVM_Task, 'duration_secs': 0.525148} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.669378] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.671305] env[70020]: INFO nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Took 8.94 seconds to spawn the instance on the hypervisor. [ 1064.671305] env[70020]: DEBUG nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.671305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818363ee-d1dd-4567-b45c-0952d2cacf95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.707741] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528e21db-ab68-e9eb-c2f2-380da9f6ff83, 'name': SearchDatastore_Task, 'duration_secs': 0.018742} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.708404] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.708672] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.709342] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc6d925d-02ab-4875-b29b-2e5f2ab22116 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.720577] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1064.720577] env[70020]: value = "task-3618874" [ 1064.720577] env[70020]: _type = "Task" [ 1064.720577] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.733952] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.768166] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618868, 'name': CreateVM_Task, 'duration_secs': 0.695347} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.768587] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1064.769428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.769676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.770599] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1064.771572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.773879] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dccad194-4eab-4e99-8570-9644db14c8f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.783247] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1064.783247] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520008a6-ba34-3dcf-c2a7-5bf4760f1693" [ 1064.783247] env[70020]: _type = "Task" [ 1064.783247] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.799872] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520008a6-ba34-3dcf-c2a7-5bf4760f1693, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.807528] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618873, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.822382] env[70020]: DEBUG oslo_concurrency.lockutils [req-35f3c294-bc4f-4ae0-b0e8-5ba30a8e7aa7 req-09bae485-9114-47be-86cf-26b348ba4fb3 service nova] Releasing lock "refresh_cache-3dedfa48-0839-462e-8c32-ba5252f07ac0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.997956] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.998106] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.998255] env[70020]: DEBUG nova.network.neutron [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1065.072653] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1065.081521] env[70020]: DEBUG nova.network.neutron [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updating instance_info_cache with network_info: [{"id": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "address": "fa:16:3e:17:d6:22", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9d26c4-ee", "ovs_interfaceid": "9e9d26c4-eeea-4e28-84a1-156d81e4466a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.100086] env[70020]: ERROR nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] [req-72c6d769-c7bb-4d51-bbf2-5f265f3e9f09] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-72c6d769-c7bb-4d51-bbf2-5f265f3e9f09"}]} [ 1065.120591] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618871, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714544} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.121601] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1065.123747] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c972e083-8c91-4875-a8c6-8257b06c93a1/c972e083-8c91-4875-a8c6-8257b06c93a1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.123966] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.124636] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6775c3e4-a3ea-4678-8ef3-e4c93ee21ba8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.135475] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1065.135475] env[70020]: value = "task-3618875" [ 1065.135475] env[70020]: _type = "Task" [ 1065.135475] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.145597] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1065.145597] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1065.151150] env[70020]: DEBUG oslo_concurrency.lockutils [req-71822742-51fb-42ac-bf8c-65f45d6af93f req-0ec9c79f-f59a-4a58-ba3a-493e4bfc6cbb service nova] Releasing lock "refresh_cache-9d1568bf-4027-4d4c-b089-276006eee715" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.151150] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618875, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.161101] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1065.183136] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1065.199187] env[70020]: INFO nova.compute.manager [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Took 44.76 seconds to build instance. 
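
The ERROR at [ 1065.100086] ("409 ... placement.concurrent_update ... resource provider generation conflict") followed by the "Refreshing inventories/aggregates/traits" and later "Updated inventory ... with generation 136" entries reflects Placement's generation-based optimistic concurrency: every inventory PUT carries the provider generation, and a 409 means the cached generation went stale, so the client re-reads the provider and resubmits. Below is a minimal sketch of that retry pattern, not Nova's actual report client; the endpoint URL, session object, and retry count are illustrative assumptions.

```python
# Minimal sketch (assumed names) of the generation-conflict retry seen in the log:
# PUT inventory with the cached generation; on HTTP 409 re-fetch the provider and retry.
import requests

PLACEMENT = "http://placement.example/placement"   # assumed endpoint for illustration
HEADERS = {"OpenStack-API-Version": "placement 1.26"}


def get_provider_generation(session: requests.Session, rp_uuid: str) -> int:
    """Fetch the current generation of a resource provider."""
    resp = session.get(f"{PLACEMENT}/resource_providers/{rp_uuid}", headers=HEADERS)
    resp.raise_for_status()
    return resp.json()["generation"]


def set_inventory(session: requests.Session, rp_uuid: str, inventories: dict,
                  max_retries: int = 3) -> None:
    """PUT the full inventory, retrying on a generation conflict (HTTP 409)."""
    for _ in range(max_retries):
        generation = get_provider_generation(session, rp_uuid)
        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        resp = session.put(
            f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return
        # 409 "placement.concurrent_update": another writer bumped the generation;
        # loop around, re-read the provider, and resubmit with the fresh value.
    raise RuntimeError(f"Gave up updating inventory for {rp_uuid} after retries")
```

In the log this plays out as: the failed update at generation 135, a refresh of inventories, aggregates, and traits for provider ee72c483-d9d9-4e62-8f73-e9f24668500d, and a successful set_inventory_for_provider that moves the generation from 136 to 137.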
[ 1065.239179] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618874, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.305899] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520008a6-ba34-3dcf-c2a7-5bf4760f1693, 'name': SearchDatastore_Task, 'duration_secs': 0.021679} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.306162] env[70020]: DEBUG oslo_vmware.api [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Task: {'id': task-3618873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.529597} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.310108] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.310108] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.310108] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.310108] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.310108] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1065.310368] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.310368] 
env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.310543] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.310686] env[70020]: INFO nova.compute.manager [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1065.310927] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.311387] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51e48793-145b-4b34-8a36-22f35d8b9796 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.313556] env[70020]: DEBUG nova.compute.manager [-] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.313556] env[70020]: DEBUG nova.network.neutron [-] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.330918] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1065.332015] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1065.332015] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fd1cfc8-b408-4e43-9945-81266c27d161 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.344873] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1065.344873] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bb4901-a57b-d473-4704-649bc610bade" [ 1065.344873] env[70020]: _type = "Task" [ 1065.344873] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.355379] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bb4901-a57b-d473-4704-649bc610bade, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.401649] env[70020]: DEBUG nova.network.neutron [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.517626] env[70020]: DEBUG nova.compute.manager [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Received event network-vif-plugged-8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1065.517896] env[70020]: DEBUG oslo_concurrency.lockutils [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.518495] env[70020]: DEBUG oslo_concurrency.lockutils [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.518679] env[70020]: DEBUG oslo_concurrency.lockutils [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.518852] env[70020]: DEBUG nova.compute.manager [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] No waiting events found dispatching network-vif-plugged-8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1065.519028] env[70020]: WARNING 
nova.compute.manager [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Received unexpected event network-vif-plugged-8e1b8b9c-b1c2-448e-8d9c-621c1810194a for instance with vm_state building and task_state spawning. [ 1065.519196] env[70020]: DEBUG nova.compute.manager [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Received event network-changed-8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1065.519385] env[70020]: DEBUG nova.compute.manager [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Refreshing instance network info cache due to event network-changed-8e1b8b9c-b1c2-448e-8d9c-621c1810194a. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1065.519599] env[70020]: DEBUG oslo_concurrency.lockutils [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] Acquiring lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.544304] env[70020]: DEBUG nova.network.neutron [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1065.562048] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4c0a7b-4dd4-46e3-8091-0be14863cfdb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.573402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2571258-a9b7-4b61-9eef-96fbffce0b8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.611432] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.612991] env[70020]: DEBUG nova.objects.instance [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'flavor' on Instance uuid b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.620062] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf98a9a-43e7-4c1c-bc9b-842f95efc6de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.630092] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5749e1a-68f3-484f-bb34-2acdf2e7ff27 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.640643] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.650649] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1065.661808] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.342752} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.662711] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.663553] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf94d8f-cd0c-463d-adcc-1f9384a2c091 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.689524] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] c972e083-8c91-4875-a8c6-8257b06c93a1/c972e083-8c91-4875-a8c6-8257b06c93a1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.692412] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7676637e-ceee-4243-bfc5-247aa0a54b68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.706898] env[70020]: DEBUG oslo_concurrency.lockutils [None req-05a9fe88-b7e5-4ebd-94e7-8ed34aaa0c73 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.282s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1065.714533] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1065.714533] env[70020]: value = "task-3618876" [ 1065.714533] env[70020]: _type = "Task" [ 1065.714533] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.723569] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618876, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.732412] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760511} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.732659] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.732960] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.733136] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6665e7be-48e6-417c-a95b-4eae55005100 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.741685] env[70020]: DEBUG nova.network.neutron [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating instance_info_cache with network_info: [{"id": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "address": "fa:16:3e:44:89:42", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": 
"nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1b8b9c-b1", "ovs_interfaceid": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.745305] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1065.745305] env[70020]: value = "task-3618877" [ 1065.745305] env[70020]: _type = "Task" [ 1065.745305] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.755809] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618877, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.855921] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bb4901-a57b-d473-4704-649bc610bade, 'name': SearchDatastore_Task, 'duration_secs': 0.057916} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.856761] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-429188b1-76d4-41f1-9c13-8d11d4ce8436 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.863039] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1065.863039] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f82f23-22bf-4647-47d9-be764ecb64c0" [ 1065.863039] env[70020]: _type = "Task" [ 1065.863039] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.871404] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f82f23-22bf-4647-47d9-be764ecb64c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.904567] env[70020]: INFO nova.compute.manager [-] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Took 1.33 seconds to deallocate network for instance. 
[ 1066.119970] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1b0af0-3251-4333-adb6-4ed2eafbcc73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.150601] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.154045] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d4f56fe-9951-4557-ae57-9cd8184588c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.162795] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1066.162795] env[70020]: value = "task-3618878" [ 1066.162795] env[70020]: _type = "Task" [ 1066.162795] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.177986] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.198701] env[70020]: DEBUG nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1066.198701] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 136 to 137 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1066.198701] env[70020]: DEBUG nova.compute.provider_tree [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1066.230385] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618876, 'name': ReconfigVM_Task, 'duration_secs': 0.334347} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.230662] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Reconfigured VM instance instance-00000061 to attach disk [datastore1] c972e083-8c91-4875-a8c6-8257b06c93a1/c972e083-8c91-4875-a8c6-8257b06c93a1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.231356] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19f4cd1e-6712-4777-bc67-0a99d5345280 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.239616] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1066.239616] env[70020]: value = "task-3618879" [ 1066.239616] env[70020]: _type = "Task" [ 1066.239616] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.245833] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.246242] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance network_info: |[{"id": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "address": "fa:16:3e:44:89:42", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1b8b9c-b1", "ovs_interfaceid": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1066.247548] env[70020]: DEBUG oslo_concurrency.lockutils [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] Acquired lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.247774] env[70020]: DEBUG nova.network.neutron [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Refreshing network info cache for port 8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1066.248946] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:89:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e1b8b9c-b1c2-448e-8d9c-621c1810194a', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1066.259599] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1066.269294] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1066.269482] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618879, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.269996] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cba07eb9-b553-42ce-9f6e-dff10c0d2bc2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.293828] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11381} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.295715] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1066.296087] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1066.296087] env[70020]: value = "task-3618880" [ 1066.296087] env[70020]: _type = "Task" [ 1066.296087] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.297515] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3451f743-74c0-4427-bdc2-f71b33d68cdf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.322907] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.329927] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d082d955-3f20-493c-b5e0-40649d906bb2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.348166] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618880, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.354828] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1066.354828] env[70020]: value = "task-3618881" [ 1066.354828] env[70020]: _type = "Task" [ 1066.354828] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.364349] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618881, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.375074] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f82f23-22bf-4647-47d9-be764ecb64c0, 'name': SearchDatastore_Task, 'duration_secs': 0.021544} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.375226] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.375421] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 9d1568bf-4027-4d4c-b089-276006eee715/9d1568bf-4027-4d4c-b089-276006eee715.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1066.375712] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50d6a26f-dcd4-4a7e-a5b3-f615c75173e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.384600] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1066.384600] env[70020]: value = "task-3618882" [ 1066.384600] env[70020]: _type = "Task" [ 1066.384600] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.395416] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618882, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.417404] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.570450] env[70020]: DEBUG nova.network.neutron [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updated VIF entry in instance network info cache for port 8e1b8b9c-b1c2-448e-8d9c-621c1810194a. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1066.570803] env[70020]: DEBUG nova.network.neutron [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating instance_info_cache with network_info: [{"id": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "address": "fa:16:3e:44:89:42", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1b8b9c-b1", "ovs_interfaceid": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.676402] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618878, 'name': PowerOffVM_Task, 'duration_secs': 0.369382} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.676693] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1066.682758] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1066.682758] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bbd25ac-0b26-4ea6-b646-f84c43f164af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.704818] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.527s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.707257] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1066.707257] env[70020]: value = "task-3618883" [ 1066.707257] env[70020]: _type = "Task" [ 1066.707257] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.707764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.046s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.707979] env[70020]: DEBUG nova.objects.instance [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lazy-loading 'resources' on Instance uuid d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.720778] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618883, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.731213] env[70020]: INFO nova.scheduler.client.report [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Deleted allocations for instance da07cb36-244f-4f48-a5b6-8d00324c1edf [ 1066.752016] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618879, 'name': Rename_Task, 'duration_secs': 0.171931} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.752238] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.752528] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a676b108-7131-4d90-83d3-4ef79dfd25fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.763026] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1066.763026] env[70020]: value = "task-3618884" [ 1066.763026] env[70020]: _type = "Task" [ 1066.763026] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.775659] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.819176] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618880, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.839918] env[70020]: DEBUG nova.compute.manager [req-a24d9daf-44cd-4024-9b24-47d4e187be58 req-4ad2949c-380c-47c6-a5ea-d0137349308c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Received event network-vif-deleted-92b2e42f-1b28-4c86-a59d-e06f2adc11da {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.840158] env[70020]: INFO nova.compute.manager [req-a24d9daf-44cd-4024-9b24-47d4e187be58 req-4ad2949c-380c-47c6-a5ea-d0137349308c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Neutron deleted interface 92b2e42f-1b28-4c86-a59d-e06f2adc11da; detaching it from the instance and deleting it from the info cache [ 1066.840337] env[70020]: DEBUG nova.network.neutron [req-a24d9daf-44cd-4024-9b24-47d4e187be58 req-4ad2949c-380c-47c6-a5ea-d0137349308c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.870222] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618881, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.899196] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618882, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.922547] env[70020]: DEBUG nova.network.neutron [-] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.073285] env[70020]: DEBUG oslo_concurrency.lockutils [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] Releasing lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.073555] env[70020]: DEBUG nova.compute.manager [req-1e595865-da95-4cc4-9e87-0134b49332f1 req-f98a8597-dca7-4c6d-9b2e-e6f9bbc0a10b service nova] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Received event network-vif-deleted-12ace8f2-7b35-437a-aba2-e371201f3343 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1067.229137] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618883, 'name': ReconfigVM_Task, 'duration_secs': 0.445049} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.229901] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1067.229901] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.230163] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6aaae96-0060-4d28-a67d-e0c56b3a0c3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.244356] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1067.244356] env[70020]: value = "task-3618885" [ 1067.244356] env[70020]: _type = "Task" [ 1067.244356] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.245170] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d0008684-3e2e-4e51-947e-d302a2eeb8c9 tempest-AttachVolumeTestJSON-972861423 tempest-AttachVolumeTestJSON-972861423-project-member] Lock "da07cb36-244f-4f48-a5b6-8d00324c1edf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.498s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.261058] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.275515] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618884, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.315957] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618880, 'name': CreateVM_Task, 'duration_secs': 0.581177} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.319285] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1067.319917] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.320171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.320608] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1067.321581] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d283afc1-b8a6-4b20-9d6a-583219622673 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.327219] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1067.327219] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525213bc-27b4-f1dc-6a06-dde8784e8c3f" [ 1067.327219] env[70020]: _type = "Task" [ 1067.327219] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.337743] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525213bc-27b4-f1dc-6a06-dde8784e8c3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.343262] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-898cb7a7-8c7e-47e4-a328-002b32ab9ff9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.353878] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c555a32-6ce1-44d2-8cd8-4f473c65d79d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.377979] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618881, 'name': ReconfigVM_Task, 'duration_secs': 0.656987} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.380889] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00/1ddd5a29-075b-482a-a6e9-4c7345673a00.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.381308] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-796f6b1c-d1e2-4154-b765-43c0a7663c93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.402054] env[70020]: DEBUG nova.compute.manager [req-a24d9daf-44cd-4024-9b24-47d4e187be58 req-4ad2949c-380c-47c6-a5ea-d0137349308c service nova] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Detach interface failed, port_id=92b2e42f-1b28-4c86-a59d-e06f2adc11da, reason: Instance 3dedfa48-0839-462e-8c32-ba5252f07ac0 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1067.402577] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1067.402577] env[70020]: value = "task-3618886" [ 1067.402577] env[70020]: _type = "Task" [ 1067.402577] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.412924] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618882, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575452} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.413635] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 9d1568bf-4027-4d4c-b089-276006eee715/9d1568bf-4027-4d4c-b089-276006eee715.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1067.413881] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1067.414188] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-384e943d-41da-46b9-9405-01c9ad3bf8f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.419337] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618886, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.428021] env[70020]: INFO nova.compute.manager [-] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Took 2.11 seconds to deallocate network for instance. [ 1067.428422] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1067.428422] env[70020]: value = "task-3618887" [ 1067.428422] env[70020]: _type = "Task" [ 1067.428422] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.448462] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618887, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.600079] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06e2835-7205-4827-b59e-d73f2173c09b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.609396] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c930ffb0-ae06-432e-ae39-daee1a6e36f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.641835] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1d489f-484a-4061-a346-2282797b1cba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.651549] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a64d50b-a5d4-4e97-82d8-5054a1a5a562 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.668130] env[70020]: DEBUG nova.compute.provider_tree [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.756276] env[70020]: DEBUG oslo_vmware.api [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618885, 'name': PowerOnVM_Task, 'duration_secs': 0.429824} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.756599] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.756870] env[70020]: DEBUG nova.compute.manager [None req-6fdd9544-f901-49f8-840a-9ae51702aba4 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.757721] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd104d8-a1dc-41e8-adb5-6d0e4c8d9d9f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.776537] env[70020]: DEBUG oslo_vmware.api [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618884, 'name': PowerOnVM_Task, 'duration_secs': 0.562272} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.776895] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.777060] env[70020]: INFO nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Took 9.60 seconds to spawn the instance on the hypervisor. [ 1067.777254] env[70020]: DEBUG nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.778017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bc4cd3-a8fe-4dd2-8354-fced8851b00f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.840119] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525213bc-27b4-f1dc-6a06-dde8784e8c3f, 'name': SearchDatastore_Task, 'duration_secs': 0.018623} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.840421] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.840684] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1067.840953] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.841139] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.841375] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1067.841731] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ffbba99-202f-482e-b34d-d677d5692038 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.853916] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1067.854131] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1067.854981] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b88198b6-3e93-49fa-8363-d81400a7b29f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.862609] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1067.862609] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f275df-7032-cf70-1c4c-a6d81add555d" [ 1067.862609] env[70020]: _type = "Task" [ 1067.862609] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.872192] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f275df-7032-cf70-1c4c-a6d81add555d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.916243] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618886, 'name': Rename_Task, 'duration_secs': 0.168074} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.916532] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.917144] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e72b477d-4acf-4d61-af05-6a85e9ededd1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.926192] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1067.926192] env[70020]: value = "task-3618888" [ 1067.926192] env[70020]: _type = "Task" [ 1067.926192] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.937864] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.947428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.947428] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086084} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.947428] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.947428] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d920f8-1f05-4a8c-b725-bb11646f8110 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.972881] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 9d1568bf-4027-4d4c-b089-276006eee715/9d1568bf-4027-4d4c-b089-276006eee715.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.973628] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b23e9c9b-8b6e-4704-bac4-fb2a3009eb91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.001027] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1068.001027] env[70020]: value = "task-3618889" [ 1068.001027] env[70020]: _type = "Task" [ 1068.001027] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.009652] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.175345] env[70020]: DEBUG nova.scheduler.client.report [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.305160] env[70020]: INFO nova.compute.manager [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Took 35.16 seconds to build instance. 
[ 1068.374589] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f275df-7032-cf70-1c4c-a6d81add555d, 'name': SearchDatastore_Task, 'duration_secs': 0.025319} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.374817] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a72fd6cb-e006-4fef-9b7d-e08974bf061d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.383689] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1068.383689] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ee458f-fad8-309c-fb9e-2db4dc8e7718" [ 1068.383689] env[70020]: _type = "Task" [ 1068.383689] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.393896] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ee458f-fad8-309c-fb9e-2db4dc8e7718, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.436458] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618888, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.510355] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618889, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.686162] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.689390] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.863s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.689827] env[70020]: DEBUG nova.objects.instance [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'resources' on Instance uuid 5c216231-afc5-41df-a243-bb2a17c20bfe {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.715436] env[70020]: INFO nova.scheduler.client.report [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Deleted allocations for instance d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52 [ 1068.768729] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.769039] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.769260] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.769445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.769614] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 
tempest-ServerTagsTestJSON-601315959-project-member] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.771640] env[70020]: INFO nova.compute.manager [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Terminating instance [ 1068.807698] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0a63dc1a-d80e-4aac-9f33-93296e9294b9 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.672s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.894418] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ee458f-fad8-309c-fb9e-2db4dc8e7718, 'name': SearchDatastore_Task, 'duration_secs': 0.024069} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.894743] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.894967] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1068.895257] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac4af7d3-5b36-4f96-b6e8-7c7b9c63c4d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.905874] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1068.905874] env[70020]: value = "task-3618891" [ 1068.905874] env[70020]: _type = "Task" [ 1068.905874] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.915988] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618891, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.935697] env[70020]: DEBUG oslo_vmware.api [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618888, 'name': PowerOnVM_Task, 'duration_secs': 0.835472} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.935968] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1068.936880] env[70020]: DEBUG nova.compute.manager [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.938089] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673498b0-7a5d-4c8d-b210-871d3190d2f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.949148] env[70020]: DEBUG nova.compute.manager [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-changed-179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.949148] env[70020]: DEBUG nova.compute.manager [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Refreshing instance network info cache due to event network-changed-179ff8c1-53f9-4484-9dce-1fd85174d71d. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1068.949148] env[70020]: DEBUG oslo_concurrency.lockutils [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] Acquiring lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.949148] env[70020]: DEBUG oslo_concurrency.lockutils [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] Acquired lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.949148] env[70020]: DEBUG nova.network.neutron [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Refreshing network info cache for port 179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.013157] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618889, 'name': ReconfigVM_Task, 'duration_secs': 0.757694} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.013450] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 9d1568bf-4027-4d4c-b089-276006eee715/9d1568bf-4027-4d4c-b089-276006eee715.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.014112] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06769a20-4de3-40ee-8b91-c0db8609084d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.024332] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1069.024332] env[70020]: value = "task-3618892" [ 1069.024332] env[70020]: _type = "Task" [ 1069.024332] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.034159] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618892, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.229196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-12405c8e-d864-4f6b-b515-ff2357d0da1e tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.511s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.275160] env[70020]: DEBUG nova.compute.manager [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1069.275404] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.276964] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763604c5-0d82-454e-aef8-4f6c46e3cb85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.291407] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.292096] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9b95f84-41ad-4f90-a5f1-196ea8d60abe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.305423] env[70020]: DEBUG oslo_vmware.api [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1069.305423] env[70020]: value = "task-3618893" [ 1069.305423] env[70020]: _type = "Task" [ 1069.305423] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.326281] env[70020]: DEBUG oslo_vmware.api [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618893, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.426048] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618891, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.472478] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.542226] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618892, 'name': Rename_Task, 'duration_secs': 0.199086} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.545482] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.546292] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44e81557-73c8-44b4-ad81-e24fa779f247 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.559227] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1069.559227] env[70020]: value = "task-3618894" [ 1069.559227] env[70020]: _type = "Task" [ 1069.559227] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.571017] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.605535] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c01f42-617c-4ebb-a451-0447072af5cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.616356] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ac784f-e17c-43c6-b526-63994e55e2ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.656190] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f790d5f-77f8-4ee0-94ef-437b1339d6e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.668801] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc991bf-87b2-4fec-8fbb-eb6793fec979 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.683900] env[70020]: DEBUG nova.compute.provider_tree [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.761035] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "1ddd5a29-075b-482a-a6e9-4c7345673a00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.761383] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 
tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "1ddd5a29-075b-482a-a6e9-4c7345673a00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.761779] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "1ddd5a29-075b-482a-a6e9-4c7345673a00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.761940] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "1ddd5a29-075b-482a-a6e9-4c7345673a00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.762171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "1ddd5a29-075b-482a-a6e9-4c7345673a00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.765907] env[70020]: DEBUG nova.network.neutron [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updated VIF entry in instance network info cache for port 179ff8c1-53f9-4484-9dce-1fd85174d71d. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.766304] env[70020]: DEBUG nova.network.neutron [req-1ce5ae78-acca-48ae-be85-5b160e244ffc req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.767511] env[70020]: INFO nova.compute.manager [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Terminating instance [ 1069.823083] env[70020]: DEBUG oslo_vmware.api [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618893, 'name': PowerOffVM_Task, 'duration_secs': 0.249172} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.823536] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1069.823631] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.823941] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3aaf11b4-34da-4ed1-b1a7-1bff7b0880fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.895649] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.896140] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.896206] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Deleting the datastore file [datastore1] 97fe6c57-03de-4cf8-a990-ff4f88db6cd7 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.896506] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61c9a933-dbae-403d-ad7f-ab89dc5d4a44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.904947] env[70020]: DEBUG oslo_vmware.api [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for the task: (returnval){ [ 1069.904947] env[70020]: value = "task-3618896" [ 1069.904947] env[70020]: _type = "Task" [ 1069.904947] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.913033] env[70020]: DEBUG oslo_vmware.api [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.921662] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574888} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.921921] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1069.922169] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1069.922415] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-baf47354-c03c-40d4-b16b-766c3e716cc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.931379] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1069.931379] env[70020]: value = "task-3618897" [ 1069.931379] env[70020]: _type = "Task" [ 1069.931379] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.941488] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618897, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.071732] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618894, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.165269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.165269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.165269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.165269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.165269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.167040] env[70020]: INFO nova.compute.manager [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Terminating instance [ 1070.190110] env[70020]: DEBUG nova.scheduler.client.report [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.270920] env[70020]: DEBUG oslo_concurrency.lockutils [req-1ce5ae78-acca-48ae-be85-5b160e244ffc 
req-f5a07378-6935-4ad3-957a-ade36e38ff3c service nova] Releasing lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.270920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "refresh_cache-1ddd5a29-075b-482a-a6e9-4c7345673a00" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.270920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquired lock "refresh_cache-1ddd5a29-075b-482a-a6e9-4c7345673a00" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.270920] env[70020]: DEBUG nova.network.neutron [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1070.293693] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.294054] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.294323] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.294520] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.294686] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.297575] env[70020]: INFO nova.compute.manager [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Terminating instance [ 1070.415357] env[70020]: DEBUG oslo_vmware.api [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Task: {'id': task-3618896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263817} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.415807] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.416130] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1070.416590] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1070.416590] env[70020]: INFO nova.compute.manager [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1070.416816] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1070.417063] env[70020]: DEBUG nova.compute.manager [-] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1070.417130] env[70020]: DEBUG nova.network.neutron [-] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1070.443023] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618897, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074869} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.443023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.443336] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0836fd9c-f58b-453e-a850-126a28cbccb2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.467394] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.467700] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd8c715c-b524-452f-a7f6-002a4dbceff8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.494935] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1070.494935] env[70020]: value = "task-3618898" [ 1070.494935] env[70020]: _type = "Task" [ 1070.494935] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.503580] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618898, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.569192] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618894, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.673511] env[70020]: DEBUG nova.compute.manager [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1070.673511] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.674591] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e61de8-d098-4f7e-afe4-61aceaf30833 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.684207] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.684488] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0be7a976-a9cc-4b53-84e2-9f57c8c186f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.694215] env[70020]: DEBUG oslo_vmware.api [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1070.694215] env[70020]: value = "task-3618899" [ 1070.694215] env[70020]: _type = "Task" [ 1070.694215] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.699858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.703798] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.464s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.707923] env[70020]: DEBUG nova.compute.manager [req-ef4df929-abef-4bfa-b894-1bb0bae4ca35 req-1a125174-02a8-4996-a65f-a8e3145e6dff service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Received event network-vif-deleted-658a9776-9b7b-4d90-86b3-79a86023c519 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.708208] env[70020]: INFO nova.compute.manager [req-ef4df929-abef-4bfa-b894-1bb0bae4ca35 req-1a125174-02a8-4996-a65f-a8e3145e6dff service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Neutron deleted interface 658a9776-9b7b-4d90-86b3-79a86023c519; detaching it from the instance and deleting it from the info cache [ 1070.709178] env[70020]: DEBUG nova.network.neutron [req-ef4df929-abef-4bfa-b894-1bb0bae4ca35 req-1a125174-02a8-4996-a65f-a8e3145e6dff service nova] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Updating 
instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.715353] env[70020]: DEBUG oslo_vmware.api [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.735501] env[70020]: INFO nova.scheduler.client.report [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted allocations for instance 5c216231-afc5-41df-a243-bb2a17c20bfe [ 1070.792504] env[70020]: DEBUG nova.network.neutron [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1070.802644] env[70020]: DEBUG nova.compute.manager [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1070.802905] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.803863] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe02f05-69b2-43df-942b-6b5b184fb59c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.813720] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.814037] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a10ef9b1-5cd3-4a1d-b0c8-543b6525dd10 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.823736] env[70020]: DEBUG oslo_vmware.api [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1070.823736] env[70020]: value = "task-3618900" [ 1070.823736] env[70020]: _type = "Task" [ 1070.823736] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.835936] env[70020]: DEBUG oslo_vmware.api [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618900, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.862837] env[70020]: DEBUG nova.network.neutron [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.005954] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618898, 'name': ReconfigVM_Task, 'duration_secs': 0.484373} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.007049] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.007551] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1e7c1f9-22d9-44b0-9b7c-3c207a987db8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.015364] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1071.015364] env[70020]: value = "task-3618901" [ 1071.015364] env[70020]: _type = "Task" [ 1071.015364] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.024991] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618901, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.070295] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618894, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.174766] env[70020]: DEBUG nova.network.neutron [-] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.207917] env[70020]: DEBUG oslo_vmware.api [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618899, 'name': PowerOffVM_Task, 'duration_secs': 0.300259} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.207917] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.207917] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.207917] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea93e89f-72cb-4f87-8002-ede68640fd70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.216858] env[70020]: INFO nova.compute.claims [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.227056] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14e8a348-6773-448c-89d6-708a2fc1078a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.230764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.231142] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.242598] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffc0964-b4d1-4f9a-8842-7a93930c5fc1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.260499] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0c90b65-6fa2-406b-823f-784c1bfc90e4 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "5c216231-afc5-41df-a243-bb2a17c20bfe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.088s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.290100] env[70020]: DEBUG nova.compute.manager [req-ef4df929-abef-4bfa-b894-1bb0bae4ca35 req-1a125174-02a8-4996-a65f-a8e3145e6dff service nova] [instance: 
97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Detach interface failed, port_id=658a9776-9b7b-4d90-86b3-79a86023c519, reason: Instance 97fe6c57-03de-4cf8-a990-ff4f88db6cd7 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1071.311562] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1071.311809] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1071.312057] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleting the datastore file [datastore2] 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.312285] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e4e7c72-b870-480a-b319-eda0d12ba2e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.321011] env[70020]: DEBUG oslo_vmware.api [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for the task: (returnval){ [ 1071.321011] env[70020]: value = "task-3618903" [ 1071.321011] env[70020]: _type = "Task" [ 1071.321011] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.333738] env[70020]: DEBUG oslo_vmware.api [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618900, 'name': PowerOffVM_Task, 'duration_secs': 0.251061} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.336782] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.336986] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.337296] env[70020]: DEBUG oslo_vmware.api [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618903, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.337529] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd5682f0-9004-44f3-82b7-1fd4e68e6dad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.365633] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Releasing lock "refresh_cache-1ddd5a29-075b-482a-a6e9-4c7345673a00" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.366254] env[70020]: DEBUG nova.compute.manager [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1071.366544] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.367796] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea249c24-5a16-458d-8b7a-de075c8fb516 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.379409] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.379735] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5503403c-80da-4dc7-be22-48c381de17f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.389375] env[70020]: DEBUG oslo_vmware.api [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1071.389375] env[70020]: value = "task-3618905" [ 1071.389375] env[70020]: _type = "Task" [ 1071.389375] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.410956] env[70020]: DEBUG oslo_vmware.api [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618905, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.419398] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1071.419398] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1071.419398] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Deleting the datastore file [datastore2] a39731d2-0b9b-41fa-b9ac-f80193a26d20 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.419705] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63a4e2c5-184f-4f4f-83c7-58dc94bcb926 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.428663] env[70020]: DEBUG oslo_vmware.api [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for the task: (returnval){ [ 1071.428663] env[70020]: value = "task-3618906" [ 1071.428663] env[70020]: _type = "Task" [ 1071.428663] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.438896] env[70020]: DEBUG oslo_vmware.api [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.527775] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618901, 'name': Rename_Task, 'duration_secs': 0.219073} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.528092] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1071.528357] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0774d8fe-c4b1-48c7-b2b9-2f277b0d6664 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.538406] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1071.538406] env[70020]: value = "task-3618908" [ 1071.538406] env[70020]: _type = "Task" [ 1071.538406] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.557115] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.574483] env[70020]: DEBUG oslo_vmware.api [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618894, 'name': PowerOnVM_Task, 'duration_secs': 1.600126} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.574862] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1071.575082] env[70020]: INFO nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Took 10.98 seconds to spawn the instance on the hypervisor. [ 1071.575277] env[70020]: DEBUG nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1071.576162] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623b5ed0-4a46-4726-a410-7bead617848d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.681248] env[70020]: INFO nova.compute.manager [-] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Took 1.26 seconds to deallocate network for instance. 
[ 1071.729132] env[70020]: INFO nova.compute.resource_tracker [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating resource usage from migration 1a2443fb-c003-49f2-8631-b910a7bd8e63 [ 1071.735635] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1071.832425] env[70020]: DEBUG oslo_vmware.api [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Task: {'id': task-3618903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214525} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.832778] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.832998] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.833220] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.833420] env[70020]: INFO nova.compute.manager [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1071.833685] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.833930] env[70020]: DEBUG nova.compute.manager [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1071.834056] env[70020]: DEBUG nova.network.neutron [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1071.899879] env[70020]: DEBUG oslo_vmware.api [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618905, 'name': PowerOffVM_Task, 'duration_secs': 0.168575} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.902419] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.902594] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.903100] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44748cd7-5365-44eb-875a-6f2aa7d4eb2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.940372] env[70020]: DEBUG oslo_vmware.api [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Task: {'id': task-3618906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402018} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.940657] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.941338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.941338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.941338] env[70020]: INFO nova.compute.manager [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1071.941554] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.943081] env[70020]: DEBUG nova.compute.manager [-] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1071.943081] env[70020]: DEBUG nova.network.neutron [-] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1071.944974] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1071.945228] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1071.945422] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Deleting the datastore file [datastore1] 1ddd5a29-075b-482a-a6e9-4c7345673a00 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.948410] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6c69480-00d2-4ef1-96aa-ec070955fef1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.955986] env[70020]: DEBUG oslo_vmware.api [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for the task: (returnval){ [ 1071.955986] env[70020]: value = "task-3618910" [ 1071.955986] env[70020]: _type = "Task" [ 1071.955986] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.968584] env[70020]: DEBUG oslo_vmware.api [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.049625] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618908, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.079147] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fd0e27-f29d-48fc-b324-9cb6e1a04918 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.087153] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd0311e-095d-4d38-adc7-8170a6a678c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.101967] env[70020]: INFO nova.compute.manager [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Took 38.17 seconds to build instance. [ 1072.132964] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98431dd-9138-4c99-8360-8e0d98229c56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.144207] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ce2bfe-99d2-43f7-8be9-7d9d35bf9692 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.160707] env[70020]: DEBUG nova.compute.provider_tree [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.189342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.254705] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.468641] env[70020]: DEBUG oslo_vmware.api [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Task: {'id': task-3618910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240969} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.468914] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1072.469129] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1072.469308] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1072.469479] env[70020]: INFO nova.compute.manager [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1072.469716] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1072.469908] env[70020]: DEBUG nova.compute.manager [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1072.470013] env[70020]: DEBUG nova.network.neutron [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1072.494940] env[70020]: DEBUG nova.network.neutron [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1072.532893] env[70020]: DEBUG nova.compute.manager [req-e0cc2130-0f1d-4249-9e3a-32efe6d16754 req-0768feb3-d638-464e-9b04-2fefa83130dd service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Received event network-vif-deleted-46420036-2adf-470d-b041-a6487903eed6 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.533226] env[70020]: INFO nova.compute.manager [req-e0cc2130-0f1d-4249-9e3a-32efe6d16754 req-0768feb3-d638-464e-9b04-2fefa83130dd service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Neutron deleted interface 46420036-2adf-470d-b041-a6487903eed6; detaching it from the instance and deleting it from the info cache [ 1072.533378] env[70020]: DEBUG nova.network.neutron [req-e0cc2130-0f1d-4249-9e3a-32efe6d16754 req-0768feb3-d638-464e-9b04-2fefa83130dd service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.550030] env[70020]: DEBUG oslo_vmware.api [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618908, 'name': PowerOnVM_Task, 'duration_secs': 0.842317} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.551142] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1072.551351] env[70020]: INFO nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 1072.551528] env[70020]: DEBUG nova.compute.manager [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.552546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e22518-99a3-49fa-a5b8-d5b7ff1f8be3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.630447] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8f2e9acf-52ca-41f2-abfb-11791fe2976c tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9d1568bf-4027-4d4c-b089-276006eee715" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.720s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.632924] env[70020]: DEBUG nova.network.neutron [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.664219] env[70020]: DEBUG nova.scheduler.client.report [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.755932] env[70020]: DEBUG nova.compute.manager [req-e442ab95-1756-4e57-9b4e-86135735ff8a req-37931d9d-f450-4452-9e3d-a9062dce1c74 service nova] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Received event network-vif-deleted-52cf3b73-bbee-4e96-91f2-a1caa2041501 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.913580] env[70020]: DEBUG nova.network.neutron [-] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.999418] env[70020]: DEBUG nova.network.neutron [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.041919] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-719eee2b-34ed-41aa-a579-6dc36059baf9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.053020] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7509b457-1a9f-467d-af35-a4c01f1b5478 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.071931] env[70020]: INFO nova.compute.manager [None 
req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Took 27.76 seconds to build instance. [ 1073.089319] env[70020]: DEBUG nova.compute.manager [req-e0cc2130-0f1d-4249-9e3a-32efe6d16754 req-0768feb3-d638-464e-9b04-2fefa83130dd service nova] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Detach interface failed, port_id=46420036-2adf-470d-b041-a6487903eed6, reason: Instance a39731d2-0b9b-41fa-b9ac-f80193a26d20 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1073.138154] env[70020]: INFO nova.compute.manager [-] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Took 1.30 seconds to deallocate network for instance. [ 1073.172133] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.467s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.172133] env[70020]: INFO nova.compute.manager [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Migrating [ 1073.181163] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.278s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.181406] env[70020]: DEBUG nova.objects.instance [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1073.367032] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "9d1568bf-4027-4d4c-b089-276006eee715" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.367328] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9d1568bf-4027-4d4c-b089-276006eee715" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.367539] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "9d1568bf-4027-4d4c-b089-276006eee715-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.367743] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9d1568bf-4027-4d4c-b089-276006eee715-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.367915] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9d1568bf-4027-4d4c-b089-276006eee715-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.370596] env[70020]: INFO nova.compute.manager [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Terminating instance [ 1073.417997] env[70020]: INFO nova.compute.manager [-] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Took 1.48 seconds to deallocate network for instance. [ 1073.500845] env[70020]: INFO nova.compute.manager [-] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Took 1.03 seconds to deallocate network for instance. [ 1073.576724] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b2d42d26-bf2f-4611-85e2-9f55c816b602 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.277s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.645140] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.692543] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.692775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.692944] env[70020]: DEBUG nova.network.neutron [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 
8adadb2e-2a20-45b1-bed8-34e09df25f39] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.875575] env[70020]: DEBUG nova.compute.manager [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1073.875691] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.876687] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf491ac-38ab-4eef-a9b1-793796e4a779 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.886279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.886581] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-934c51fd-bb51-4986-89dc-033ccd2a2368 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.895400] env[70020]: DEBUG oslo_vmware.api [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1073.895400] env[70020]: value = "task-3618911" [ 1073.895400] env[70020]: _type = "Task" [ 1073.895400] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.905822] env[70020]: DEBUG oslo_vmware.api [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.925034] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.007730] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.201559] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fc0f50be-bed0-4f68-95b2-b001023af1c3 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.202751] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.089s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.204205] env[70020]: INFO nova.compute.claims [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.408168] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.408406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.409441] env[70020]: DEBUG oslo_vmware.api [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618911, 'name': PowerOffVM_Task, 'duration_secs': 0.230116} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.409857] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.410061] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.410517] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79852576-f8aa-4e16-be5d-bd5f360441de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.419860] env[70020]: DEBUG nova.network.neutron [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.482526] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.483128] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.483228] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 
tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore1] 9d1568bf-4027-4d4c-b089-276006eee715 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.483497] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0088ba4-0260-4471-98bf-10257d06111c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.490297] env[70020]: DEBUG oslo_vmware.api [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1074.490297] env[70020]: value = "task-3618913" [ 1074.490297] env[70020]: _type = "Task" [ 1074.490297] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.498735] env[70020]: DEBUG oslo_vmware.api [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618913, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.782953] env[70020]: DEBUG nova.compute.manager [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Received event network-changed-8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.783210] env[70020]: DEBUG nova.compute.manager [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Refreshing instance network info cache due to event network-changed-8e1b8b9c-b1c2-448e-8d9c-621c1810194a. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1074.783452] env[70020]: DEBUG oslo_concurrency.lockutils [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] Acquiring lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.783584] env[70020]: DEBUG oslo_concurrency.lockutils [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] Acquired lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.783742] env[70020]: DEBUG nova.network.neutron [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Refreshing network info cache for port 8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1074.911220] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1074.921911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.003679] env[70020]: DEBUG oslo_vmware.api [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324549} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.003679] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.003841] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.004104] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.004367] env[70020]: INFO nova.compute.manager [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1075.004974] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.004974] env[70020]: DEBUG nova.compute.manager [-] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1075.004974] env[70020]: DEBUG nova.network.neutron [-] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1075.374398] env[70020]: DEBUG nova.compute.manager [req-a884109a-cee4-4382-b17b-3f355f835121 req-95c8b4be-bc64-4214-95de-88bb81ee3717 service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Received event network-vif-deleted-1b623703-5d19-4e24-b8aa-5b76192f92f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1075.374642] env[70020]: INFO nova.compute.manager [req-a884109a-cee4-4382-b17b-3f355f835121 req-95c8b4be-bc64-4214-95de-88bb81ee3717 service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Neutron deleted interface 1b623703-5d19-4e24-b8aa-5b76192f92f5; detaching it from the instance and deleting it from the info cache [ 1075.374759] env[70020]: DEBUG nova.network.neutron [req-a884109a-cee4-4382-b17b-3f355f835121 req-95c8b4be-bc64-4214-95de-88bb81ee3717 service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.432272] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.529655] env[70020]: DEBUG nova.network.neutron [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updated VIF entry in instance network info cache for port 8e1b8b9c-b1c2-448e-8d9c-621c1810194a. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1075.530035] env[70020]: DEBUG nova.network.neutron [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating instance_info_cache with network_info: [{"id": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "address": "fa:16:3e:44:89:42", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1b8b9c-b1", "ovs_interfaceid": "8e1b8b9c-b1c2-448e-8d9c-621c1810194a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.533106] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd31135-f2e0-4d0a-a13b-e72fb176b2c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.542346] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6dd003-e246-4542-bcff-46eea0d1f320 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.575927] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b7ff19-a733-4cfc-8c79-b46fa8632ab4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.585513] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b207fa3-86a9-47e7-946d-7feb0be83679 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.600020] env[70020]: DEBUG nova.compute.provider_tree [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.856640] env[70020]: DEBUG nova.network.neutron [-] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.877559] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-6adc1f0d-600e-4a72-a787-0b0297d41778 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.889795] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7224aac7-ce61-4c95-aecb-ef09ee837ea6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.920620] env[70020]: DEBUG nova.compute.manager [req-a884109a-cee4-4382-b17b-3f355f835121 req-95c8b4be-bc64-4214-95de-88bb81ee3717 service nova] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Detach interface failed, port_id=1b623703-5d19-4e24-b8aa-5b76192f92f5, reason: Instance 9d1568bf-4027-4d4c-b089-276006eee715 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1076.032669] env[70020]: DEBUG oslo_concurrency.lockutils [req-971e159a-5d11-4bc2-8453-f49f316fb437 req-25527d2f-4c36-47f4-934a-90f3f515b5c2 service nova] Releasing lock "refresh_cache-8dbb1de0-38de-493f-9512-b8754bab7bcb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.104224] env[70020]: DEBUG nova.scheduler.client.report [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1076.359032] env[70020]: INFO nova.compute.manager [-] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Took 1.35 seconds to deallocate network for instance. [ 1076.444526] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c34cdf8-42ce-4ea7-bce9-906c96a2aafd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.463910] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1076.611453] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.611989] env[70020]: DEBUG nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1076.614527] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.153s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.616562] env[70020]: INFO nova.compute.claims [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.865725] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.969464] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.969777] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9bcbc7e-7e16-44da-862e-2345d690e117 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.978729] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1076.978729] env[70020]: value = "task-3618915" [ 1076.978729] env[70020]: _type = "Task" [ 1076.978729] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.988525] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618915, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.123151] env[70020]: DEBUG nova.compute.utils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1077.124583] env[70020]: DEBUG nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1077.124803] env[70020]: DEBUG nova.network.neutron [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1077.172628] env[70020]: DEBUG nova.policy [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ebabdad8aa843f28165fcd167382c60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfa7d3b1f5a14c60b19cde5030c2f0a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1077.440578] env[70020]: DEBUG nova.network.neutron [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Successfully created port: 9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.489898] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618915, 'name': PowerOffVM_Task, 'duration_secs': 0.26644} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.490231] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.490414] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1077.629086] env[70020]: DEBUG nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1077.913771] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d289f4-3596-4fad-917d-29a2f7d15a85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.921755] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdff276-e72a-4ee4-8b03-939f33ae2b05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.952086] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087770e1-0c15-4e62-ac34-f4cf22e43b63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.960294] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28d2b87-6cb7-4e53-abd5-23e6f51a5800 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.974090] env[70020]: DEBUG nova.compute.provider_tree [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.997098] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1077.997343] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.997498] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1077.997679] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.997822] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1077.997965] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1077.998251] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1077.998416] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1077.998759] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1077.998759] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1077.998899] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.004080] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3f0d725-47c0-4d1a-98bf-5eb594f54dff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.022067] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1078.022067] env[70020]: value = "task-3618916" [ 1078.022067] env[70020]: _type = "Task" [ 1078.022067] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.031335] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618916, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.479246] env[70020]: DEBUG nova.scheduler.client.report [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.532386] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618916, 'name': ReconfigVM_Task, 'duration_secs': 0.284208} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.532698] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1078.644037] env[70020]: DEBUG nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.671739] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.671992] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.672177] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.672364] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.672505] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.672648] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.672851] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.673092] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.673265] env[70020]: DEBUG nova.virt.hardware [None 
req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.673421] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.673586] env[70020]: DEBUG nova.virt.hardware [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.674668] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a402081-8419-4a8c-ad8d-dbc868c7ae24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.683276] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bcca04-af04-4820-8600-04bda64fcbd3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.894783] env[70020]: DEBUG nova.compute.manager [req-48afc51d-5826-4e4f-83d2-cb0dd19e88a5 req-e1e6b203-fb81-4579-a093-f835e077c159 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Received event network-vif-plugged-9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.894783] env[70020]: DEBUG oslo_concurrency.lockutils [req-48afc51d-5826-4e4f-83d2-cb0dd19e88a5 req-e1e6b203-fb81-4579-a093-f835e077c159 service nova] Acquiring lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.894783] env[70020]: DEBUG oslo_concurrency.lockutils [req-48afc51d-5826-4e4f-83d2-cb0dd19e88a5 req-e1e6b203-fb81-4579-a093-f835e077c159 service nova] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.894783] env[70020]: DEBUG oslo_concurrency.lockutils [req-48afc51d-5826-4e4f-83d2-cb0dd19e88a5 req-e1e6b203-fb81-4579-a093-f835e077c159 service nova] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.894981] env[70020]: DEBUG nova.compute.manager [req-48afc51d-5826-4e4f-83d2-cb0dd19e88a5 req-e1e6b203-fb81-4579-a093-f835e077c159 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] No waiting events found dispatching network-vif-plugged-9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1078.895176] env[70020]: WARNING nova.compute.manager 
[req-48afc51d-5826-4e4f-83d2-cb0dd19e88a5 req-e1e6b203-fb81-4579-a093-f835e077c159 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Received unexpected event network-vif-plugged-9156bf4c-eba1-4b04-b328-19e9968900cb for instance with vm_state building and task_state spawning. [ 1078.982567] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.986053] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1078.987854] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.216s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.990706] env[70020]: INFO nova.compute.claims [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1078.994386] env[70020]: DEBUG nova.network.neutron [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Successfully updated port: 9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None 
req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1079.041064] env[70020]: DEBUG nova.virt.hardware [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1079.048743] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Reconfiguring VM instance instance-0000001d to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1079.050170] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a27fbcd3-023c-40c4-ae41-625ecbd7044c 
{{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.071607] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1079.071607] env[70020]: value = "task-3618917" [ 1079.071607] env[70020]: _type = "Task" [ 1079.071607] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.081446] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618917, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.489563] env[70020]: DEBUG nova.compute.utils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.491051] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.491249] env[70020]: DEBUG nova.network.neutron [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.501500] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.501682] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.501865] env[70020]: DEBUG nova.network.neutron [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.544426] env[70020]: DEBUG nova.policy [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b6c6acc12cc4c879bc276e3e278516b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'd9625b7e3b5e49038c5df445b15d9ea4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1079.589014] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618917, 'name': ReconfigVM_Task, 'duration_secs': 0.203211} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.589014] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Reconfigured VM instance instance-0000001d to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1079.589014] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074fece3-a4c5-49e6-b2d5-6eacfe66a492 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.620473] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39/8adadb2e-2a20-45b1-bed8-34e09df25f39.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.621291] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1404cb9c-470b-497b-8ad9-a9388f2ab53e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.644050] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1079.644050] env[70020]: value = "task-3618918" [ 1079.644050] env[70020]: _type = "Task" [ 1079.644050] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.658363] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618918, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.820756] env[70020]: DEBUG nova.network.neutron [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Successfully created port: 40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.994967] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1080.045710] env[70020]: DEBUG nova.network.neutron [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.788601] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618918, 'name': ReconfigVM_Task, 'duration_secs': 0.285752} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.790101] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39/8adadb2e-2a20-45b1-bed8-34e09df25f39.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.790101] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.940319] env[70020]: DEBUG nova.compute.manager [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Received event network-changed-9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.940319] env[70020]: DEBUG nova.compute.manager [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Refreshing instance network info cache due to event network-changed-9156bf4c-eba1-4b04-b328-19e9968900cb. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1080.940319] env[70020]: DEBUG oslo_concurrency.lockutils [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] Acquiring lock "refresh_cache-e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.008646] env[70020]: DEBUG nova.network.neutron [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Updating instance_info_cache with network_info: [{"id": "9156bf4c-eba1-4b04-b328-19e9968900cb", "address": "fa:16:3e:ee:8d:bd", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9156bf4c-eb", "ovs_interfaceid": "9156bf4c-eba1-4b04-b328-19e9968900cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.047843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6079f44-f4e4-4c79-b3af-663571f692e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.056473] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c72022-5d3c-436f-ae6b-69b2f067e449 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.086959] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798d5dae-5215-4c17-9200-3a6a0bc8d75b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.094975] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba34451d-7f24-4fb2-94de-1fd72afcf815 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.109343] env[70020]: DEBUG nova.compute.provider_tree [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1081.286285] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1081.298086] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efcacec-85ef-42bb-ac8c-3cc0b3b8f849 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.319961] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae6c278-179b-4e2d-9fd3-fc7c5be0f92d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.339625] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1081.345030] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.345272] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.345528] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.345627] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Flavor pref 0:0:0 
{{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.345752] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.345938] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.346194] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.346371] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.346544] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.346686] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.346851] env[70020]: DEBUG nova.virt.hardware [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.347626] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601aa6d8-977f-4891-9ec0-cfacc3c8e731 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.355798] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d4463f-b0f6-4286-9034-75f5eb97ae0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.511262] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.511609] env[70020]: DEBUG nova.compute.manager [None 
req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Instance network_info: |[{"id": "9156bf4c-eba1-4b04-b328-19e9968900cb", "address": "fa:16:3e:ee:8d:bd", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9156bf4c-eb", "ovs_interfaceid": "9156bf4c-eba1-4b04-b328-19e9968900cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1081.511919] env[70020]: DEBUG oslo_concurrency.lockutils [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] Acquired lock "refresh_cache-e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.512117] env[70020]: DEBUG nova.network.neutron [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Refreshing network info cache for port 9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.513406] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:8d:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9156bf4c-eba1-4b04-b328-19e9968900cb', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.526015] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.527470] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.527700] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee0c15d6-3763-417e-94b4-95d035b1ac4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.545666] env[70020]: DEBUG nova.network.neutron [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Successfully updated port: 40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.554487] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.554487] env[70020]: value = "task-3618919" [ 1081.554487] env[70020]: _type = "Task" [ 1081.554487] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.564315] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618919, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.634557] env[70020]: ERROR nova.scheduler.client.report [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [req-18e6910b-2ca0-49da-a005-14905c0f251d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-18e6910b-2ca0-49da-a005-14905c0f251d"}]} [ 1081.651035] env[70020]: DEBUG nova.scheduler.client.report [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1081.668550] env[70020]: DEBUG nova.scheduler.client.report [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1081.668779] env[70020]: DEBUG nova.compute.provider_tree [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1081.680761] env[70020]: DEBUG nova.scheduler.client.report [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1081.699944] env[70020]: DEBUG nova.scheduler.client.report [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1081.889650] env[70020]: DEBUG nova.network.neutron [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Port 4b681dd6-fab3-4812-988e-26b219b6c5c3 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 
1081.982620] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179ef54d-4e37-4c30-b86c-b40967533aa5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.990988] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdef37d-c35a-4687-aa18-469cdab20ac2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.023917] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0f4da2-2eff-489b-8fd4-3aa52150ca12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.035516] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121ae59d-7d97-49d1-8a42-3127b2ceff91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.048830] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "refresh_cache-3163a070-a0db-4a41-af32-dfbe7a1766ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.048970] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquired lock "refresh_cache-3163a070-a0db-4a41-af32-dfbe7a1766ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.049136] env[70020]: DEBUG nova.network.neutron [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.050445] env[70020]: DEBUG nova.compute.provider_tree [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1082.064479] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618919, 'name': CreateVM_Task, 'duration_secs': 0.331928} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.066753] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.067625] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.067791] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.068127] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1082.068644] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d390fec7-0260-4534-bbc4-dd9e3f556ea1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.073492] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1082.073492] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b055f4-856f-7e77-469c-07881683be62" [ 1082.073492] env[70020]: _type = "Task" [ 1082.073492] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.083231] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b055f4-856f-7e77-469c-07881683be62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.240104] env[70020]: DEBUG nova.network.neutron [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Updated VIF entry in instance network info cache for port 9156bf4c-eba1-4b04-b328-19e9968900cb. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.240512] env[70020]: DEBUG nova.network.neutron [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Updating instance_info_cache with network_info: [{"id": "9156bf4c-eba1-4b04-b328-19e9968900cb", "address": "fa:16:3e:ee:8d:bd", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9156bf4c-eb", "ovs_interfaceid": "9156bf4c-eba1-4b04-b328-19e9968900cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.584988] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b055f4-856f-7e77-469c-07881683be62, 'name': SearchDatastore_Task, 'duration_secs': 0.011893} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.585834] env[70020]: DEBUG nova.network.neutron [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1082.587837] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.587989] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.588211] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.588356] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.588530] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.589422] env[70020]: DEBUG nova.scheduler.client.report [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 139 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1082.589637] env[70020]: DEBUG nova.compute.provider_tree [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 139 to 140 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1082.589810] env[70020]: DEBUG nova.compute.provider_tree [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] 
Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1082.592875] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a8f9ce3-fb61-4011-ac7d-d5eeac53eb61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.604468] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.604695] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1082.617684] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477b21de-129d-41e4-93d6-eea4113bcdac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.617684] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1082.617684] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529da9f7-bb64-38e1-b0cf-607fdf20761b" [ 1082.617684] env[70020]: _type = "Task" [ 1082.617684] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.622116] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529da9f7-bb64-38e1-b0cf-607fdf20761b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.715958] env[70020]: DEBUG nova.network.neutron [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Updating instance_info_cache with network_info: [{"id": "40ed203a-db04-4cf2-abb9-8399b1af2672", "address": "fa:16:3e:dd:76:4c", "network": {"id": "559f7162-6593-4b0c-9c31-8caac172d43a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1174433615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9625b7e3b5e49038c5df445b15d9ea4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ed203a-db", "ovs_interfaceid": "40ed203a-db04-4cf2-abb9-8399b1af2672", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.743182] env[70020]: DEBUG oslo_concurrency.lockutils [req-74d0b24b-70fc-402b-866d-31a445f645fa req-538153d1-8eb9-484a-8e1e-2fefd06676a5 service nova] Releasing lock "refresh_cache-e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.913865] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.913865] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.913865] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.967015] env[70020]: DEBUG nova.compute.manager [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] 
[instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Received event network-vif-plugged-40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.967249] env[70020]: DEBUG oslo_concurrency.lockutils [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] Acquiring lock "3163a070-a0db-4a41-af32-dfbe7a1766ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.967455] env[70020]: DEBUG oslo_concurrency.lockutils [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.967619] env[70020]: DEBUG oslo_concurrency.lockutils [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.967782] env[70020]: DEBUG nova.compute.manager [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] No waiting events found dispatching network-vif-plugged-40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.968879] env[70020]: WARNING nova.compute.manager [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Received unexpected event network-vif-plugged-40ed203a-db04-4cf2-abb9-8399b1af2672 for instance with vm_state building and task_state spawning. [ 1082.968879] env[70020]: DEBUG nova.compute.manager [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Received event network-changed-40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.968879] env[70020]: DEBUG nova.compute.manager [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Refreshing instance network info cache due to event network-changed-40ed203a-db04-4cf2-abb9-8399b1af2672. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1082.968879] env[70020]: DEBUG oslo_concurrency.lockutils [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] Acquiring lock "refresh_cache-3163a070-a0db-4a41-af32-dfbe7a1766ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.096702] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.109s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.097276] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1083.101059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.459s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.101225] env[70020]: INFO nova.compute.claims [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.124241] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529da9f7-bb64-38e1-b0cf-607fdf20761b, 'name': SearchDatastore_Task, 'duration_secs': 0.010476} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.125052] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e459a7f-3420-459e-b9ea-88c97b699abb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.132053] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1083.132053] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cfb3c5-9aa1-67d1-0363-d0f3586e121c" [ 1083.132053] env[70020]: _type = "Task" [ 1083.132053] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.145604] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cfb3c5-9aa1-67d1-0363-d0f3586e121c, 'name': SearchDatastore_Task, 'duration_secs': 0.010546} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.145604] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.146114] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] e5c6ad2e-9925-4234-a7da-ea2618b7c7d5/e5c6ad2e-9925-4234-a7da-ea2618b7c7d5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1083.146114] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20d19273-32bf-4316-a5d9-86c967aa7f0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.153107] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1083.153107] env[70020]: value = "task-3618920" [ 1083.153107] env[70020]: _type = "Task" [ 1083.153107] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.161676] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618920, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.218812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Releasing lock "refresh_cache-3163a070-a0db-4a41-af32-dfbe7a1766ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.219205] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Instance network_info: |[{"id": "40ed203a-db04-4cf2-abb9-8399b1af2672", "address": "fa:16:3e:dd:76:4c", "network": {"id": "559f7162-6593-4b0c-9c31-8caac172d43a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1174433615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9625b7e3b5e49038c5df445b15d9ea4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ed203a-db", "ovs_interfaceid": "40ed203a-db04-4cf2-abb9-8399b1af2672", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.219527] env[70020]: DEBUG oslo_concurrency.lockutils [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] Acquired lock "refresh_cache-3163a070-a0db-4a41-af32-dfbe7a1766ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.219871] env[70020]: DEBUG nova.network.neutron [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Refreshing network info cache for port 40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.221103] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:76:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40ed203a-db04-4cf2-abb9-8399b1af2672', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.230154] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 
tempest-ServerAddressesTestJSON-1180661610-project-member] Creating folder: Project (d9625b7e3b5e49038c5df445b15d9ea4). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.233496] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7938123f-1dfe-4cd9-ac1e-ebdf2c89152c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.247570] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Created folder: Project (d9625b7e3b5e49038c5df445b15d9ea4) in parent group-v721521. [ 1083.247779] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Creating folder: Instances. Parent ref: group-v721793. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.248052] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caaef5cd-84d4-4a81-8171-d3164349b4d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.260357] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Created folder: Instances in parent group-v721793. [ 1083.260623] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.260833] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.261070] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-342ee85d-091c-47e2-859f-be0ff61e5e09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.284883] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.284883] env[70020]: value = "task-3618923" [ 1083.284883] env[70020]: _type = "Task" [ 1083.284883] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.302835] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618923, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.487031] env[70020]: DEBUG nova.network.neutron [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Updated VIF entry in instance network info cache for port 40ed203a-db04-4cf2-abb9-8399b1af2672. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1083.487549] env[70020]: DEBUG nova.network.neutron [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Updating instance_info_cache with network_info: [{"id": "40ed203a-db04-4cf2-abb9-8399b1af2672", "address": "fa:16:3e:dd:76:4c", "network": {"id": "559f7162-6593-4b0c-9c31-8caac172d43a", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1174433615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9625b7e3b5e49038c5df445b15d9ea4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ed203a-db", "ovs_interfaceid": "40ed203a-db04-4cf2-abb9-8399b1af2672", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.606244] env[70020]: DEBUG nova.compute.utils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.609955] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1083.610279] env[70020]: DEBUG nova.network.neutron [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1083.653168] env[70020]: DEBUG nova.policy [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1374458c1943470eba7e774715ba1ca9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3f6d704dd464768953c41d34d34d944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1083.665244] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502883} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.665521] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] e5c6ad2e-9925-4234-a7da-ea2618b7c7d5/e5c6ad2e-9925-4234-a7da-ea2618b7c7d5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.665745] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.665982] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0843b15a-99f0-4d16-80ba-e8fdb931eac9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.676310] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1083.676310] env[70020]: value = "task-3618924" [ 1083.676310] env[70020]: _type = "Task" [ 1083.676310] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.692146] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618924, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.800121] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618923, 'name': CreateVM_Task, 'duration_secs': 0.40359} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.800393] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.801360] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.801637] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.802150] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.802537] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-250ff55b-8e4c-418d-97cd-d93da9d0795b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.809971] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1083.809971] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5229143e-3c97-1771-df10-d283d027d029" [ 1083.809971] env[70020]: _type = "Task" [ 1083.809971] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.823635] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5229143e-3c97-1771-df10-d283d027d029, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.948877] env[70020]: DEBUG nova.network.neutron [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Successfully created port: 36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1083.956498] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.956668] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.956840] env[70020]: DEBUG nova.network.neutron [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.990914] env[70020]: DEBUG oslo_concurrency.lockutils [req-977d970b-1397-4a8b-bc01-3697e0b86202 req-85b5da40-631d-43ae-9dd4-8a4ff51a5cc4 service nova] Releasing lock "refresh_cache-3163a070-a0db-4a41-af32-dfbe7a1766ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.114724] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1084.196206] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073572} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.196519] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1084.197385] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eba7665-9fbe-47d4-8ce2-5e18a2da3dd7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.225194] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] e5c6ad2e-9925-4234-a7da-ea2618b7c7d5/e5c6ad2e-9925-4234-a7da-ea2618b7c7d5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.228279] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cda0ed6-5af5-4622-b24a-35c1f8f27c9f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.249651] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1084.249651] env[70020]: value = "task-3618925" [ 1084.249651] env[70020]: _type = "Task" [ 1084.249651] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.262431] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.324567] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5229143e-3c97-1771-df10-d283d027d029, 'name': SearchDatastore_Task, 'duration_secs': 0.01569} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.324882] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.325204] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.325493] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.325748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.325913] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.326111] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ee097e2-356f-444b-ad3f-68c9fb6c21b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.341830] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.342045] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.342825] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7f30d46-396b-423c-b987-572848c7f940 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.349367] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1084.349367] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520eb34a-074b-63a5-3715-b07d61d84455" [ 1084.349367] env[70020]: _type = "Task" [ 1084.349367] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.361113] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520eb34a-074b-63a5-3715-b07d61d84455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.491860] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb365f78-4ff8-4300-b712-a09b181ef322 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.501926] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f8fd78-6db4-46ca-9ef8-45df6f977f0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.533034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f767344-44d8-4f44-8796-90031d44185d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.544041] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d15606-2fc6-43b3-8378-8904e28eb53c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.559025] env[70020]: DEBUG nova.compute.provider_tree [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.760697] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618925, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.838681] env[70020]: DEBUG nova.network.neutron [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.863625] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520eb34a-074b-63a5-3715-b07d61d84455, 'name': SearchDatastore_Task, 'duration_secs': 0.023827} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.864481] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95142402-427f-47fb-8ee0-5d4fd5d30790 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.871156] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1084.871156] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a9723-193b-308f-eea5-eac6f677c516" [ 1084.871156] env[70020]: _type = "Task" [ 1084.871156] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.880407] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523a9723-193b-308f-eea5-eac6f677c516, 'name': SearchDatastore_Task} progress is 0%. 
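The instance_info_cache update above carries the instance's full network_info: port id, MAC, subnets, fixed IPs and any attached floating IPs. A short sketch of walking that structure to collect the addresses, using only the fields shown in the record:

    # A trimmed network_info entry with just the fields this sketch reads.
    network_info = [{
        "id": "4b681dd6-fab3-4812-988e-26b219b6c5c3",
        "address": "fa:16:3e:29:86:67",
        "network": {"subnets": [{
            "ips": [{"address": "192.168.128.9", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.237", "type": "floating"}]}],
        }]},
    }]

    def collect_addresses(vifs):
        result = {}
        for vif in vifs:
            fixed, floating = [], []
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
            result[vif["id"]] = {"mac": vif["address"],
                                 "fixed": fixed, "floating": floating}
        return result

    print(collect_addresses(network_info))
    # fixed 192.168.128.9 with floating 10.180.180.237, as in the record above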
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.062133] env[70020]: DEBUG nova.scheduler.client.report [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.127319] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1085.153689] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1085.153923] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.154086] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.154271] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.154446] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1085.154609] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1085.154811] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1085.155016] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1085.155179] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1085.155342] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1085.155511] env[70020]: DEBUG nova.virt.hardware [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1085.156404] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f65c78e-8dac-4f3f-9552-4a882ecda045 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.165521] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9f33e3-2ede-44ae-b266-3f87148c270f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.260608] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618925, 'name': ReconfigVM_Task} progress is 99%. 
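The nova.virt.hardware records above enumerate candidate (sockets, cores, threads) layouts for the 1-vCPU flavor under 65536:65536:65536 limits and settle on 1:1:1. A simplified sketch of that enumeration (an approximation of the idea, not the Nova routine itself):

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Keep every factorisation of the vCPU count that fits the limits.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # 1 vCPU under the limits from the log -> a single 1:1:1 layout
    print(possible_topologies(1, 65536, 65536, 65536))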
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.343384] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.382487] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523a9723-193b-308f-eea5-eac6f677c516, 'name': SearchDatastore_Task, 'duration_secs': 0.011529} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.383141] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.383141] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3163a070-a0db-4a41-af32-dfbe7a1766ac/3163a070-a0db-4a41-af32-dfbe7a1766ac.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.383340] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbfc119b-483b-40b3-b58a-da0edc4a651b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.392324] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1085.392324] env[70020]: value = "task-3618926" [ 1085.392324] env[70020]: _type = "Task" [ 1085.392324] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.402658] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618926, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.418706] env[70020]: DEBUG nova.compute.manager [req-be385574-b09d-4b72-9c3f-02e0220ba64a req-72ce2612-b101-4771-b4c0-405bf56ae2ac service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Received event network-vif-plugged-36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.418946] env[70020]: DEBUG oslo_concurrency.lockutils [req-be385574-b09d-4b72-9c3f-02e0220ba64a req-72ce2612-b101-4771-b4c0-405bf56ae2ac service nova] Acquiring lock "f1a09304-7725-489a-8669-322a51c709e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.419256] env[70020]: DEBUG oslo_concurrency.lockutils [req-be385574-b09d-4b72-9c3f-02e0220ba64a req-72ce2612-b101-4771-b4c0-405bf56ae2ac service nova] Lock "f1a09304-7725-489a-8669-322a51c709e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.419705] env[70020]: DEBUG oslo_concurrency.lockutils [req-be385574-b09d-4b72-9c3f-02e0220ba64a req-72ce2612-b101-4771-b4c0-405bf56ae2ac service nova] Lock "f1a09304-7725-489a-8669-322a51c709e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.419705] env[70020]: DEBUG nova.compute.manager [req-be385574-b09d-4b72-9c3f-02e0220ba64a req-72ce2612-b101-4771-b4c0-405bf56ae2ac service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] No waiting events found dispatching network-vif-plugged-36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1085.419705] env[70020]: WARNING nova.compute.manager [req-be385574-b09d-4b72-9c3f-02e0220ba64a req-72ce2612-b101-4771-b4c0-405bf56ae2ac service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Received unexpected event network-vif-plugged-36abe182-e89a-4325-9d00-ce204c53a359 for instance with vm_state building and task_state spawning. [ 1085.489789] env[70020]: DEBUG nova.network.neutron [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Successfully updated port: 36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.568595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.569359] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Start building networks asynchronously for instance. 
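The network-vif-plugged records above show the external-event handshake: the spawn path can register a waiter for "network-vif-plugged-<port>", and an incoming Neutron notification either wakes that waiter or, as here, is logged as unexpected because nothing was waiting yet. A small sketch of that dispatch pattern, illustrative rather than the nova.compute.manager code:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}           # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare_for_event(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:             # "Acquiring lock '<uuid>-events'"
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # "No waiting events found dispatching ..." -> unexpected event
                print("unexpected event %s for %s" % (event_name, instance_uuid))
                return
            ev.set()                     # wake the waiter blocked on the VIF plug

    events = InstanceEvents()
    waiter = events.prepare_for_event("f1a09304", "network-vif-plugged-36abe182")
    events.pop_instance_event("f1a09304", "network-vif-plugged-36abe182")
    assert waiter.wait(timeout=1)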
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.573230] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.158s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.573411] env[70020]: DEBUG nova.objects.instance [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'resources' on Instance uuid 04de1a07-cf38-41e0-be96-237bbe1ead83 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.763540] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618925, 'name': ReconfigVM_Task, 'duration_secs': 1.036884} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.763915] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Reconfigured VM instance instance-00000064 to attach disk [datastore1] e5c6ad2e-9925-4234-a7da-ea2618b7c7d5/e5c6ad2e-9925-4234-a7da-ea2618b7c7d5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.764803] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17cc7b4a-da00-47ac-aa1d-3e621a0e851c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.775241] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1085.775241] env[70020]: value = "task-3618927" [ 1085.775241] env[70020]: _type = "Task" [ 1085.775241] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.792188] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618927, 'name': Rename_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.874825] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc974d98-7e7e-4948-8bd8-a946c2cc2151 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.901192] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21484df-345b-49fc-a150-e36ceaeea789 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.914189] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1085.917993] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618926, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.992662] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-f1a09304-7725-489a-8669-322a51c709e5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.992816] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-f1a09304-7725-489a-8669-322a51c709e5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.993014] env[70020]: DEBUG nova.network.neutron [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1086.076307] env[70020]: DEBUG nova.compute.utils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1086.077749] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1086.077962] env[70020]: DEBUG nova.network.neutron [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1086.119443] env[70020]: DEBUG nova.policy [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be48663580324ff2a7603c72dc797b55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb8b67e3378148eca1733c991ae16e83', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1086.287638] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618927, 'name': Rename_Task, 'duration_secs': 0.276292} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.288125] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.288478] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8da3c62-9035-4fc6-a957-861ca56661c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.299255] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1086.299255] env[70020]: value = "task-3618928" [ 1086.299255] env[70020]: _type = "Task" [ 1086.299255] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.306111] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.369080] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53857d6f-2143-4b4e-a48f-83b296df65d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.377277] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59854e7-8746-4036-a4db-0416502a1e18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.410988] env[70020]: DEBUG nova.network.neutron [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Successfully created port: 54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.416205] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf15d1a-00f5-4f47-bc0b-f18f6e8fe8e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.420281] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.421200] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91d65eb7-5edf-4548-a5c5-e70a7b4c93f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.427343] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618926, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561072} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.429449] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3163a070-a0db-4a41-af32-dfbe7a1766ac/3163a070-a0db-4a41-af32-dfbe7a1766ac.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.429674] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.430852] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2be448c5-c0a7-4a8f-baf4-e0a5a9106700 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.433560] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa07359-a79b-40b5-acfb-73600b7b3aa6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.437624] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1086.437624] env[70020]: value = "task-3618929" [ 1086.437624] env[70020]: _type = "Task" [ 1086.437624] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.442784] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1086.442784] env[70020]: value = "task-3618930" [ 1086.442784] env[70020]: _type = "Task" [ 1086.442784] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.452633] env[70020]: DEBUG nova.compute.provider_tree [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1086.462053] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618929, 'name': PowerOnVM_Task} progress is 0%. 
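The extend target of 1048576 in the ExtendVirtualDisk record above is the flavor's 1 GB root disk expressed in KiB (1 GiB = 1024 * 1024 KiB), assuming the size passed to the extend call is in KiB as the numbers here suggest:

    root_gb = 1                            # m1.nano root disk size from the flavor in this log
    KIB_PER_GIB = 1024 * 1024
    requested_size_kib = root_gb * KIB_PER_GIB
    assert requested_size_kib == 1048576   # matches "Extending root virtual disk to 1048576"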
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.468447] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618930, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.540361] env[70020]: DEBUG nova.network.neutron [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1086.583452] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1086.739603] env[70020]: DEBUG nova.network.neutron [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Updating instance_info_cache with network_info: [{"id": "36abe182-e89a-4325-9d00-ce204c53a359", "address": "fa:16:3e:c8:b7:65", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36abe182-e8", "ovs_interfaceid": "36abe182-e89a-4325-9d00-ce204c53a359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.808685] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618928, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.948928] env[70020]: DEBUG oslo_vmware.api [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618929, 'name': PowerOnVM_Task, 'duration_secs': 0.448272} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.949407] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1086.949593] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4353ba-266d-4647-a65b-20cee30bce58 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance '8adadb2e-2a20-45b1-bed8-34e09df25f39' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1086.966014] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07503} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.966567] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.967692] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b91a1c-c14d-4cc0-834b-b52df59a0f64 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.993996] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 3163a070-a0db-4a41-af32-dfbe7a1766ac/3163a070-a0db-4a41-af32-dfbe7a1766ac.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.994410] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5e212cb-1862-4a1d-8a4d-6cd45dff7685 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.010144] env[70020]: DEBUG nova.scheduler.client.report [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1087.010587] env[70020]: DEBUG 
nova.compute.provider_tree [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 140 to 141 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1087.010631] env[70020]: DEBUG nova.compute.provider_tree [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1087.020533] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1087.020533] env[70020]: value = "task-3618931" [ 1087.020533] env[70020]: _type = "Task" [ 1087.020533] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.033839] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618931, 'name': ReconfigVM_Task} progress is 6%. 
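Given the inventory above, the usual Placement capacity formula, (total - reserved) * allocation_ratio, puts this provider at 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk; a short snippet to reproduce that arithmetic (the formula is stated from general Placement behaviour, not from this log):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    capacity = {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inventory.items()}
    print(capacity)   # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}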
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.242362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-f1a09304-7725-489a-8669-322a51c709e5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.242699] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance network_info: |[{"id": "36abe182-e89a-4325-9d00-ce204c53a359", "address": "fa:16:3e:c8:b7:65", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36abe182-e8", "ovs_interfaceid": "36abe182-e89a-4325-9d00-ce204c53a359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1087.243165] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:b7:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36abe182-e89a-4325-9d00-ce204c53a359', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.250602] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
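The "Instance VIF info" record above is derived from the same port data as the network_info cache a few records earlier: the bridge name becomes network_name, the port's MAC and id carry over, and the NSX logical-switch id from the port details becomes the OpaqueNetwork reference. A sketch of that mapping with field names taken from the two records (the function itself is illustrative, not the driver code):

    def vif_info_from_port(vif, vif_model="vmxnet3"):
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],          # 'br-int'
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        }

    example_vif = {
        "id": "36abe182-e89a-4325-9d00-ce204c53a359",
        "address": "fa:16:3e:c8:b7:65",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa"},
    }
    print(vif_info_from_port(example_vif))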
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.250817] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.251067] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6022d10-8d34-40bb-b1bc-1e6327d08bf5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.273671] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.273671] env[70020]: value = "task-3618932" [ 1087.273671] env[70020]: _type = "Task" [ 1087.273671] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.287518] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618932, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.309702] env[70020]: DEBUG oslo_vmware.api [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618928, 'name': PowerOnVM_Task, 'duration_secs': 0.693988} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.309799] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.309995] env[70020]: INFO nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Took 8.67 seconds to spawn the instance on the hypervisor. [ 1087.310186] env[70020]: DEBUG nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.310954] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f884f8a8-cd5f-46d8-ada8-1c2dee198d7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.448414] env[70020]: DEBUG nova.compute.manager [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Received event network-changed-36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1087.448633] env[70020]: DEBUG nova.compute.manager [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Refreshing instance network info cache due to event network-changed-36abe182-e89a-4325-9d00-ce204c53a359. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1087.448863] env[70020]: DEBUG oslo_concurrency.lockutils [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] Acquiring lock "refresh_cache-f1a09304-7725-489a-8669-322a51c709e5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.449039] env[70020]: DEBUG oslo_concurrency.lockutils [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] Acquired lock "refresh_cache-f1a09304-7725-489a-8669-322a51c709e5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.449224] env[70020]: DEBUG nova.network.neutron [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Refreshing network info cache for port 36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1087.516496] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.519512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.574s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.519728] env[70020]: DEBUG nova.objects.instance [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lazy-loading 'resources' on Instance uuid 3dedfa48-0839-462e-8c32-ba5252f07ac0 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.532145] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618931, 'name': ReconfigVM_Task, 'duration_secs': 0.501202} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.532478] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 3163a070-a0db-4a41-af32-dfbe7a1766ac/3163a070-a0db-4a41-af32-dfbe7a1766ac.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.533638] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ea325bf-4f31-4b54-90a4-8df744c550ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.537265] env[70020]: INFO nova.scheduler.client.report [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocations for instance 04de1a07-cf38-41e0-be96-237bbe1ead83 [ 1087.544557] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1087.544557] env[70020]: value = "task-3618933" [ 1087.544557] env[70020]: _type = "Task" [ 1087.544557] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.558019] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618933, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.594779] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1087.624659] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.624907] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.625094] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.625296] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.625438] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.625579] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.625784] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.625937] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.626151] env[70020]: DEBUG nova.virt.hardware [None 
req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.626332] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.626504] env[70020]: DEBUG nova.virt.hardware [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.627702] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feacae37-312d-4d87-a1eb-3d326848fa0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.636187] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9396fb89-f0ef-4b87-95b8-12a543705b15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.790871] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618932, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.830886] env[70020]: INFO nova.compute.manager [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Took 25.74 seconds to build instance. [ 1087.949263] env[70020]: DEBUG nova.network.neutron [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Successfully updated port: 54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.053119] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6e9a3a1f-c2ef-4dc1-98eb-e8e9c25236b2 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "04de1a07-cf38-41e0-be96-237bbe1ead83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.254s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.064107] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618933, 'name': Rename_Task, 'duration_secs': 0.158757} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.064389] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1088.064640] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0206af05-73f2-4058-b1f7-c365514ed70d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.073153] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1088.073153] env[70020]: value = "task-3618934" [ 1088.073153] env[70020]: _type = "Task" [ 1088.073153] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.084835] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.154920] env[70020]: DEBUG nova.network.neutron [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Updated VIF entry in instance network info cache for port 36abe182-e89a-4325-9d00-ce204c53a359. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1088.155497] env[70020]: DEBUG nova.network.neutron [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Updating instance_info_cache with network_info: [{"id": "36abe182-e89a-4325-9d00-ce204c53a359", "address": "fa:16:3e:c8:b7:65", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36abe182-e8", "ovs_interfaceid": "36abe182-e89a-4325-9d00-ce204c53a359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.286853] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618932, 'name': CreateVM_Task, 'duration_secs': 0.604512} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.289335] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.290504] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.290504] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.290680] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.290910] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9506765a-bc5a-4786-b244-e990d29d3477 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.296300] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1088.296300] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fde1d8-4f3f-d586-5f7d-870002046f00" [ 1088.296300] env[70020]: _type = "Task" [ 1088.296300] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.305448] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fde1d8-4f3f-d586-5f7d-870002046f00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.312128] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7db74f-187a-43b1-a8d3-a3628eb0862c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.319594] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738c2d1f-c6d8-4c58-9688-e57736691afa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.351318] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c06a08c8-5840-4d6d-80cb-cc7d203af594 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.267s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.352288] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58aed88e-1290-4031-b506-d0bb48e783f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.362098] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cfaad8-b4ad-43c7-8529-c014fb687f24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.367557] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf82e8a-43e6-4563-90b3-ca4647c47eb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.374101] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Suspending the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1088.383307] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c1e1aee9-0795-4952-8308-bef657b60b4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.385104] env[70020]: DEBUG 
nova.compute.provider_tree [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.392594] env[70020]: DEBUG oslo_vmware.api [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1088.392594] env[70020]: value = "task-3618935" [ 1088.392594] env[70020]: _type = "Task" [ 1088.392594] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.403072] env[70020]: DEBUG oslo_vmware.api [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618935, 'name': SuspendVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.452604] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "refresh_cache-db24c4e0-f778-4488-b9cb-a06b21932b4e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.452808] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquired lock "refresh_cache-db24c4e0-f778-4488-b9cb-a06b21932b4e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.453018] env[70020]: DEBUG nova.network.neutron [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.589016] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618934, 'name': PowerOnVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.658766] env[70020]: DEBUG oslo_concurrency.lockutils [req-e9f1d8ca-847c-458e-a469-35175b32b07d req-0bb90a1e-fc23-4f30-ab83-b8cae43b38f6 service nova] Releasing lock "refresh_cache-f1a09304-7725-489a-8669-322a51c709e5" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.810110] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fde1d8-4f3f-d586-5f7d-870002046f00, 'name': SearchDatastore_Task, 'duration_secs': 0.011176} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.810776] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.811047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.811319] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.811472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.811666] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.811950] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5bf4c05-2f65-4a95-ac7e-f61f83580120 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.822513] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.822750] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.823555] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a28c1c65-9ed2-482f-a537-8c6ea0641189 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.830118] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1088.830118] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dbeb88-5df0-a49b-4b82-d55c961f9be4" [ 1088.830118] env[70020]: _type = "Task" [ 1088.830118] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.839264] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dbeb88-5df0-a49b-4b82-d55c961f9be4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.888960] env[70020]: DEBUG nova.scheduler.client.report [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.902594] env[70020]: DEBUG oslo_vmware.api [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618935, 'name': SuspendVM_Task} progress is 62%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.989465] env[70020]: DEBUG nova.network.neutron [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1089.090415] env[70020]: DEBUG oslo_vmware.api [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618934, 'name': PowerOnVM_Task, 'duration_secs': 0.606649} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.091190] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1089.091190] env[70020]: INFO nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1089.091190] env[70020]: DEBUG nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.091837] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ea306f-3226-4351-a077-f0acaff5bf7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.161985] env[70020]: DEBUG nova.network.neutron [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Updating instance_info_cache with network_info: [{"id": "54da9b83-48b2-4a5a-b141-d4c02fe9fdb8", "address": "fa:16:3e:6a:6d:69", "network": {"id": "9c051024-cf3e-4b8b-8301-86d94009b99d", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1959892819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb8b67e3378148eca1733c991ae16e83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54da9b83-48", "ovs_interfaceid": "54da9b83-48b2-4a5a-b141-d4c02fe9fdb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.169362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.169618] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.169797] env[70020]: DEBUG nova.compute.manager [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Going to confirm migration 3 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1089.342184] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dbeb88-5df0-a49b-4b82-d55c961f9be4, 'name': SearchDatastore_Task, 'duration_secs': 0.01152} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.343087] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-054deb89-c565-4fdf-830c-dd26e2e195bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.349637] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1089.349637] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c3854f-b7b3-422f-9561-e03f29e64610" [ 1089.349637] env[70020]: _type = "Task" [ 1089.349637] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.359310] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c3854f-b7b3-422f-9561-e03f29e64610, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.393831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.396378] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.924s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.396577] env[70020]: DEBUG nova.objects.instance [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1089.409672] env[70020]: DEBUG oslo_vmware.api [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618935, 'name': SuspendVM_Task, 'duration_secs': 0.705699} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.409938] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Suspended the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1089.410126] env[70020]: DEBUG nova.compute.manager [None req-4baca396-f62a-4712-a8c7-27d1b0637927 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.411187] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4a6325-72b0-495e-9694-af9929132523 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.422276] env[70020]: INFO nova.scheduler.client.report [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Deleted allocations for instance 3dedfa48-0839-462e-8c32-ba5252f07ac0 [ 1089.475784] env[70020]: DEBUG nova.compute.manager [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Received event network-vif-plugged-54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.476222] env[70020]: DEBUG oslo_concurrency.lockutils [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] Acquiring lock 
"db24c4e0-f778-4488-b9cb-a06b21932b4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.476486] env[70020]: DEBUG oslo_concurrency.lockutils [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.476662] env[70020]: DEBUG oslo_concurrency.lockutils [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.476831] env[70020]: DEBUG nova.compute.manager [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] No waiting events found dispatching network-vif-plugged-54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.477020] env[70020]: WARNING nova.compute.manager [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Received unexpected event network-vif-plugged-54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 for instance with vm_state building and task_state spawning. [ 1089.477199] env[70020]: DEBUG nova.compute.manager [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Received event network-changed-54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.477356] env[70020]: DEBUG nova.compute.manager [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Refreshing instance network info cache due to event network-changed-54da9b83-48b2-4a5a-b141-d4c02fe9fdb8. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1089.477519] env[70020]: DEBUG oslo_concurrency.lockutils [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] Acquiring lock "refresh_cache-db24c4e0-f778-4488-b9cb-a06b21932b4e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.613028] env[70020]: INFO nova.compute.manager [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Took 27.17 seconds to build instance. 
[ 1089.665025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Releasing lock "refresh_cache-db24c4e0-f778-4488-b9cb-a06b21932b4e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.665025] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Instance network_info: |[{"id": "54da9b83-48b2-4a5a-b141-d4c02fe9fdb8", "address": "fa:16:3e:6a:6d:69", "network": {"id": "9c051024-cf3e-4b8b-8301-86d94009b99d", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1959892819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb8b67e3378148eca1733c991ae16e83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54da9b83-48", "ovs_interfaceid": "54da9b83-48b2-4a5a-b141-d4c02fe9fdb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1089.665238] env[70020]: DEBUG oslo_concurrency.lockutils [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] Acquired lock "refresh_cache-db24c4e0-f778-4488-b9cb-a06b21932b4e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.665280] env[70020]: DEBUG nova.network.neutron [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Refreshing network info cache for port 54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.666489] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:6d:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2624812a-9f9c-461d-8b5f-79bea90c7ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54da9b83-48b2-4a5a-b141-d4c02fe9fdb8', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1089.673690] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Creating folder: Project (bb8b67e3378148eca1733c991ae16e83). 
Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1089.676862] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-130df115-02a3-42e7-a9cd-ef4dd81b0f7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.694364] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Created folder: Project (bb8b67e3378148eca1733c991ae16e83) in parent group-v721521. [ 1089.694553] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Creating folder: Instances. Parent ref: group-v721797. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1089.694788] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be3a7a1e-5690-46ef-9d2e-5792f4adff2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.706222] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Created folder: Instances in parent group-v721797. [ 1089.706485] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.706675] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1089.706880] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cec73950-6c39-40c9-a207-ce17691482a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.726841] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1089.726841] env[70020]: value = "task-3618938" [ 1089.726841] env[70020]: _type = "Task" [ 1089.726841] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.734734] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618938, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.735935] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.736119] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.736295] env[70020]: DEBUG nova.network.neutron [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.736475] env[70020]: DEBUG nova.objects.instance [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'info_cache' on Instance uuid 8adadb2e-2a20-45b1-bed8-34e09df25f39 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.860929] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c3854f-b7b3-422f-9561-e03f29e64610, 'name': SearchDatastore_Task, 'duration_secs': 0.011262} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.861315] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.861473] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1089.861744] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac74eff3-5d60-4b17-a020-0a57cb306c66 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.869255] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1089.869255] env[70020]: value = "task-3618939" [ 1089.869255] env[70020]: _type = "Task" [ 1089.869255] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.878975] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.929846] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91c61070-b4f1-44bf-9ee8-5a979ec3056d tempest-AttachInterfacesUnderV243Test-760224301 tempest-AttachInterfacesUnderV243Test-760224301-project-member] Lock "3dedfa48-0839-462e-8c32-ba5252f07ac0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.827s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.116368] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ef87d9f-8db4-4311-9349-fafbb63e6cef tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.684s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.241724] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618938, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.379500] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47859} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.379863] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.379972] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.380758] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a83a04f7-3698-4932-abc1-9aaa03d60dfd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.388695] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1090.388695] env[70020]: value = "task-3618940" [ 1090.388695] env[70020]: _type = "Task" [ 1090.388695] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.403408] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618940, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.404709] env[70020]: DEBUG nova.network.neutron [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Updated VIF entry in instance network info cache for port 54da9b83-48b2-4a5a-b141-d4c02fe9fdb8. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.404709] env[70020]: DEBUG nova.network.neutron [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Updating instance_info_cache with network_info: [{"id": "54da9b83-48b2-4a5a-b141-d4c02fe9fdb8", "address": "fa:16:3e:6a:6d:69", "network": {"id": "9c051024-cf3e-4b8b-8301-86d94009b99d", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1959892819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb8b67e3378148eca1733c991ae16e83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54da9b83-48", "ovs_interfaceid": "54da9b83-48b2-4a5a-b141-d4c02fe9fdb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.411640] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2cf717db-0d8c-4524-bf17-f212b4f26126 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.412595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.223s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.412810] env[70020]: DEBUG nova.objects.instance [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lazy-loading 'resources' on Instance uuid 97fe6c57-03de-4cf8-a990-ff4f88db6cd7 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.745919] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618938, 'name': CreateVM_Task, 'duration_secs': 0.54349} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.746875] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1090.747866] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.748053] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.748381] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1090.748642] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7edd950f-7457-4c82-9759-598dff021e86 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.753954] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1090.753954] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b7b0bf-645e-d92f-f711-d8f3d2ce132f" [ 1090.753954] env[70020]: _type = "Task" [ 1090.753954] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.762965] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b7b0bf-645e-d92f-f711-d8f3d2ce132f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.899101] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069281} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.900035] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1090.900180] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec960fff-081d-48b1-9db8-32a7f285706c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.915441] env[70020]: DEBUG oslo_concurrency.lockutils [req-94a74055-7d91-40e7-9453-a33aa72f79e1 req-9e6fb7d2-d33d-4c4e-9f26-7f5121d8b54d service nova] Releasing lock "refresh_cache-db24c4e0-f778-4488-b9cb-a06b21932b4e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.915914] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.916151] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.916373] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.916539] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.916699] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.930824] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 
tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.931154] env[70020]: INFO nova.compute.manager [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Terminating instance [ 1090.934986] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d32df401-3f38-4af4-8a65-9aa9726870cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.960634] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1090.960634] env[70020]: value = "task-3618941" [ 1090.960634] env[70020]: _type = "Task" [ 1090.960634] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.970166] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "3163a070-a0db-4a41-af32-dfbe7a1766ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.970426] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.970629] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "3163a070-a0db-4a41-af32-dfbe7a1766ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.970812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.970977] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.972770] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618941, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.973459] env[70020]: INFO nova.compute.manager [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Terminating instance [ 1091.030625] env[70020]: DEBUG nova.network.neutron [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [{"id": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "address": "fa:16:3e:29:86:67", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b681dd6-fa", "ovs_interfaceid": "4b681dd6-fab3-4812-988e-26b219b6c5c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.216330] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84079b9e-1394-4e73-88b9-6d88c696d871 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.226492] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c49c91-f6e6-4a50-b319-9c0f01bd16d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.262307] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467deaf9-8f1e-473b-90f9-420a7e0e7a8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.270767] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': 
session[528c1535-3daa-a7b0-823d-982a96a72224]52b7b0bf-645e-d92f-f711-d8f3d2ce132f, 'name': SearchDatastore_Task, 'duration_secs': 0.010596} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.273063] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.273314] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.273550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.273697] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.273870] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.274180] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60757f59-2e95-4c9f-8b3b-0fa18f2dea68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.276851] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d365f36-6ba9-4dba-9401-3000974a5dff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.290612] env[70020]: DEBUG nova.compute.provider_tree [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.292884] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1091.293074] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.293975] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59026bbc-36d8-47bc-8981-a719dc2194fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.299490] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1091.299490] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522a9bc0-65a7-d157-84c2-3b7015179f2f" [ 1091.299490] env[70020]: _type = "Task" [ 1091.299490] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.308590] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522a9bc0-65a7-d157-84c2-3b7015179f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.453983] env[70020]: DEBUG nova.compute.manager [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.454325] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.455208] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b3908d-fa2e-4b57-a49b-c3ff03cfe365 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.465068] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.465643] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44b9e674-6592-45d9-8ea5-3b1bd462e43d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.470575] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618941, 'name': ReconfigVM_Task, 'duration_secs': 0.286406} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.470889] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Reconfigured VM instance instance-00000066 to attach disk [datastore1] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.471556] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-215dbd89-85f5-41a4-9a85-a2c74cbc3ec2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.478894] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1091.478894] env[70020]: value = "task-3618943" [ 1091.478894] env[70020]: _type = "Task" [ 1091.478894] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.482147] env[70020]: DEBUG nova.compute.manager [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.482405] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.483252] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b7cc88-bee0-494f-852a-821c14a1a656 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.493319] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618943, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.495386] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.495482] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c9484b6-6238-4bc3-9603-d43fab754791 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.504163] env[70020]: DEBUG oslo_vmware.api [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1091.504163] env[70020]: value = "task-3618944" [ 1091.504163] env[70020]: _type = "Task" [ 1091.504163] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.512837] env[70020]: DEBUG oslo_vmware.api [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618944, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.533984] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-8adadb2e-2a20-45b1-bed8-34e09df25f39" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.534310] env[70020]: DEBUG nova.objects.instance [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'migration_context' on Instance uuid 8adadb2e-2a20-45b1-bed8-34e09df25f39 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.546484] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.546650] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.546925] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleting the datastore file [datastore1] e5c6ad2e-9925-4234-a7da-ea2618b7c7d5 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.547300] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-11515b87-3dd3-49d5-80ec-a229b78ba501 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.556285] env[70020]: DEBUG oslo_vmware.api [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1091.556285] env[70020]: value = "task-3618945" [ 1091.556285] env[70020]: _type = "Task" [ 1091.556285] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.570023] env[70020]: DEBUG oslo_vmware.api [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618945, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.795963] env[70020]: DEBUG nova.scheduler.client.report [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.818783] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522a9bc0-65a7-d157-84c2-3b7015179f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010972} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.820364] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e3c0e4a-25e1-46d4-bb6a-3c494ee6a9d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.832305] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1091.832305] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fafa8e-0f74-4c63-b889-ad31a499c8a2" [ 1091.832305] env[70020]: _type = "Task" [ 1091.832305] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.842695] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fafa8e-0f74-4c63-b889-ad31a499c8a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.990286] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618943, 'name': Rename_Task, 'duration_secs': 0.182983} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.990762] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.991118] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23be3fa1-bd3d-4c19-83ef-45eb21fb2811 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.998766] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1091.998766] env[70020]: value = "task-3618946" [ 1091.998766] env[70020]: _type = "Task" [ 1091.998766] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.007451] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618946, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.015895] env[70020]: DEBUG oslo_vmware.api [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618944, 'name': PowerOffVM_Task, 'duration_secs': 0.209404} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.016197] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.016365] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.016663] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e43c8c0-7938-4645-9e95-d69ef9e6f8ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.038868] env[70020]: DEBUG nova.objects.base [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Object Instance<8adadb2e-2a20-45b1-bed8-34e09df25f39> lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1092.039760] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38966eb8-bfcb-440b-9adb-aed5100ac47d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.064496] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ecb41f-adcf-4a3c-9f23-48a85084eedf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.071516] env[70020]: DEBUG oslo_vmware.api [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1092.071516] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529d81f1-845c-8b79-18a6-194190274fde" [ 1092.071516] env[70020]: _type = "Task" [ 1092.071516] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.075066] env[70020]: DEBUG oslo_vmware.api [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3618945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160983} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.080483] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.080729] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.080995] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.081228] env[70020]: INFO nova.compute.manager [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1092.081515] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.081833] env[70020]: DEBUG nova.compute.manager [-] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.081942] env[70020]: DEBUG nova.network.neutron [-] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.091334] env[70020]: DEBUG oslo_vmware.api [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d81f1-845c-8b79-18a6-194190274fde, 'name': SearchDatastore_Task, 'duration_secs': 0.00932} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.091334] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.094477] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.094622] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.094853] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Deleting the datastore file [datastore1] 3163a070-a0db-4a41-af32-dfbe7a1766ac {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.095066] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e6ea7c2-6d7e-4e33-976c-f5bd77723d71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.103278] env[70020]: DEBUG oslo_vmware.api [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for the task: (returnval){ [ 1092.103278] env[70020]: value = "task-3618948" [ 1092.103278] env[70020]: _type = "Task" [ 1092.103278] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.116074] env[70020]: DEBUG oslo_vmware.api [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.309877] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.312399] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.058s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.314356] env[70020]: INFO nova.compute.claims [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.346185] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fafa8e-0f74-4c63-b889-ad31a499c8a2, 'name': SearchDatastore_Task, 'duration_secs': 0.012378} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.349017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.349017] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] db24c4e0-f778-4488-b9cb-a06b21932b4e/db24c4e0-f778-4488-b9cb-a06b21932b4e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.349017] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72724f79-ea0e-4934-b635-d64d8d53ad03 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.352151] env[70020]: INFO nova.scheduler.client.report [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Deleted allocations for instance 97fe6c57-03de-4cf8-a990-ff4f88db6cd7 [ 1092.362466] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1092.362466] env[70020]: value = 
"task-3618949" [ 1092.362466] env[70020]: _type = "Task" [ 1092.362466] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.364545] env[70020]: DEBUG nova.compute.manager [req-6ec4ff64-1b23-4b6a-aaa3-d77f912f92ef req-1c2b4696-f824-4bfd-92cf-619bb60c8f1f service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Received event network-vif-deleted-9156bf4c-eba1-4b04-b328-19e9968900cb {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.364760] env[70020]: INFO nova.compute.manager [req-6ec4ff64-1b23-4b6a-aaa3-d77f912f92ef req-1c2b4696-f824-4bfd-92cf-619bb60c8f1f service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Neutron deleted interface 9156bf4c-eba1-4b04-b328-19e9968900cb; detaching it from the instance and deleting it from the info cache [ 1092.364957] env[70020]: DEBUG nova.network.neutron [req-6ec4ff64-1b23-4b6a-aaa3-d77f912f92ef req-1c2b4696-f824-4bfd-92cf-619bb60c8f1f service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.377596] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.510686] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618946, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.615377] env[70020]: DEBUG oslo_vmware.api [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Task: {'id': task-3618948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236045} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.616240] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.616240] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.616240] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.616240] env[70020]: INFO nova.compute.manager [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1092.616496] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.616708] env[70020]: DEBUG nova.compute.manager [-] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.616831] env[70020]: DEBUG nova.network.neutron [-] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.838645] env[70020]: DEBUG nova.network.neutron [-] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.871125] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7af7123-688f-405f-8e94-c6e826ac8cd9 tempest-ServerTagsTestJSON-601315959 tempest-ServerTagsTestJSON-601315959-project-member] Lock "97fe6c57-03de-4cf8-a990-ff4f88db6cd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.095s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.877184] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cc5a9db-7f71-4eea-a44e-1c4dbd8989e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.886848] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618949, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.895845] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d91f4d-3802-4bb6-904f-ad5fea7f55ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.936070] env[70020]: DEBUG nova.compute.manager [req-6ec4ff64-1b23-4b6a-aaa3-d77f912f92ef req-1c2b4696-f824-4bfd-92cf-619bb60c8f1f service nova] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Detach interface failed, port_id=9156bf4c-eba1-4b04-b328-19e9968900cb, reason: Instance e5c6ad2e-9925-4234-a7da-ea2618b7c7d5 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1093.010985] env[70020]: DEBUG oslo_vmware.api [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618946, 'name': PowerOnVM_Task, 'duration_secs': 0.561078} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.011365] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.012494] env[70020]: INFO nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Took 7.88 seconds to spawn the instance on the hypervisor. [ 1093.012716] env[70020]: DEBUG nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.013502] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c457326-6eaf-4906-aa6e-460189755147 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.342170] env[70020]: INFO nova.compute.manager [-] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Took 1.26 seconds to deallocate network for instance. [ 1093.382724] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531606} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.384375] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] db24c4e0-f778-4488-b9cb-a06b21932b4e/db24c4e0-f778-4488-b9cb-a06b21932b4e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.385075] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.385607] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae5b09f2-56db-4c82-8968-88f87f4a23f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.397664] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1093.397664] env[70020]: value = "task-3618950" [ 1093.397664] env[70020]: _type = "Task" [ 1093.397664] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.410383] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618950, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.489954] env[70020]: DEBUG nova.network.neutron [-] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.535602] env[70020]: INFO nova.compute.manager [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Took 28.79 seconds to build instance. [ 1093.630421] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3390264-78d9-456c-83ca-db1d9f011e2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.640925] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d27778e-851e-45a4-94d7-b498b502ff5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.676907] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32779503-ae2c-4005-900e-0505a67e8a29 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.685617] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ac4167-c1b4-4eed-865f-480a2e91773d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.703703] env[70020]: DEBUG nova.compute.provider_tree [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.850869] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.912979] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618950, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068794} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.912979] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1093.912979] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e6d7de-c736-46a9-9550-5d6890de3052 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.940164] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] db24c4e0-f778-4488-b9cb-a06b21932b4e/db24c4e0-f778-4488-b9cb-a06b21932b4e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.940955] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bb94c7b-a3b1-4f2d-8536-d5697ad00c5e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.962220] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1093.962220] env[70020]: value = "task-3618951" [ 1093.962220] env[70020]: _type = "Task" [ 1093.962220] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.971682] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618951, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.999655] env[70020]: INFO nova.compute.manager [-] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Took 1.38 seconds to deallocate network for instance. 
[ 1094.039396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c4cd5c-4ad1-4bcc-83a7-ca4e3d5f0d95 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "f1a09304-7725-489a-8669-322a51c709e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.301s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.207169] env[70020]: DEBUG nova.scheduler.client.report [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.389873] env[70020]: DEBUG nova.compute.manager [req-b968955b-23b8-4e03-83c0-6f25dd40b691 req-a7076d63-8868-437d-8565-312c834bdc0b service nova] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Received event network-vif-deleted-40ed203a-db04-4cf2-abb9-8399b1af2672 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1094.472471] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618951, 'name': ReconfigVM_Task, 'duration_secs': 0.292351} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.472817] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Reconfigured VM instance instance-00000067 to attach disk [datastore1] db24c4e0-f778-4488-b9cb-a06b21932b4e/db24c4e0-f778-4488-b9cb-a06b21932b4e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.473551] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-176e4173-c797-47e8-8a0d-bf24acf1ee54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.480534] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1094.480534] env[70020]: value = "task-3618952" [ 1094.480534] env[70020]: _type = "Task" [ 1094.480534] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.489304] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618952, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.505568] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.712919] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.713556] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1094.718024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.071s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.718024] env[70020]: DEBUG nova.objects.instance [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lazy-loading 'resources' on Instance uuid 2198e7f8-5458-4b97-abb3-0a3c932cebc2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.945670] env[70020]: INFO nova.compute.manager [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Rebuilding instance [ 1094.992297] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618952, 'name': Rename_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.992715] env[70020]: DEBUG nova.compute.manager [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.993452] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000c5ed8-3f0c-45eb-b5da-d862fa5d22c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.220598] env[70020]: DEBUG nova.compute.utils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.225231] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1095.225575] env[70020]: DEBUG nova.network.neutron [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1095.276765] env[70020]: DEBUG nova.policy [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afb49d648a70426fa7c39789e51ab625', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7aae0b70f9d465ebcb9defe385fa434', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1095.479490] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a09502b-0e27-439c-bece-762f1a89c07c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.493779] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e95040f-1f58-43c4-813b-064ba5be57ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.500122] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618952, 'name': Rename_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.529816] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f0b389-6dc9-46b8-ac4d-dd292d4006fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.539160] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f916d92a-8853-415a-8851-359f643df0cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.555424] env[70020]: DEBUG nova.compute.provider_tree [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.572175] env[70020]: DEBUG nova.network.neutron [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Successfully created port: 2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.729023] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1095.995887] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618952, 'name': Rename_Task, 'duration_secs': 1.142433} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.996267] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1095.996575] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8d02d37-af89-4bf0-ab91-d810eb302d7d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.004735] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1096.004735] env[70020]: value = "task-3618953" [ 1096.004735] env[70020]: _type = "Task" [ 1096.004735] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.014885] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618953, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.029933] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.030161] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3862fd23-4280-4030-8849-8b0c35b4366e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.040921] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1096.040921] env[70020]: value = "task-3618954" [ 1096.040921] env[70020]: _type = "Task" [ 1096.040921] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.051175] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.059036] env[70020]: DEBUG nova.scheduler.client.report [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.514923] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618953, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.549383] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618954, 'name': PowerOffVM_Task, 'duration_secs': 0.191752} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.549635] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.549860] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.550611] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfa4234-5c05-4a25-8eea-e7b13c82f31e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.557532] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.557759] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4e9e52b-0fe7-408c-bc41-8823876268c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.564017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.566103] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.641s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.566322] env[70020]: DEBUG nova.objects.instance [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lazy-loading 'resources' on Instance uuid a39731d2-0b9b-41fa-b9ac-f80193a26d20 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.586854] env[70020]: INFO nova.scheduler.client.report [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Deleted allocations for instance 2198e7f8-5458-4b97-abb3-0a3c932cebc2 [ 1096.621618] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Unregistered the VM {{(pid=70020) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.621618] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.621791] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file [datastore1] f1a09304-7725-489a-8669-322a51c709e5 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.622023] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50299e6c-e9fd-4417-89ba-a12652a52364 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.630104] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1096.630104] env[70020]: value = "task-3618956" [ 1096.630104] env[70020]: _type = "Task" [ 1096.630104] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.641047] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.739016] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1096.764522] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1096.764825] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1096.765008] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1096.765274] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1096.765427] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1096.765573] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1096.765800] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1096.765925] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1096.766126] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1096.766468] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1096.766741] env[70020]: DEBUG nova.virt.hardware [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1096.767718] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd1a3ee-20cd-4c5b-b49f-394f4960d42d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.776477] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e17bd19-4769-44f4-b9e7-cdb7b68606f6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.947533] env[70020]: DEBUG nova.compute.manager [req-adce8ae7-d3e1-4cd4-b058-7cb6e876ad03 req-c8b82501-12ff-4217-97f8-2bc04210d139 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received event network-vif-plugged-2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.947758] env[70020]: DEBUG oslo_concurrency.lockutils [req-adce8ae7-d3e1-4cd4-b058-7cb6e876ad03 req-c8b82501-12ff-4217-97f8-2bc04210d139 service nova] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.947994] env[70020]: DEBUG oslo_concurrency.lockutils [req-adce8ae7-d3e1-4cd4-b058-7cb6e876ad03 req-c8b82501-12ff-4217-97f8-2bc04210d139 service nova] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.948150] env[70020]: DEBUG oslo_concurrency.lockutils [req-adce8ae7-d3e1-4cd4-b058-7cb6e876ad03 req-c8b82501-12ff-4217-97f8-2bc04210d139 service nova] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.948320] env[70020]: DEBUG nova.compute.manager [req-adce8ae7-d3e1-4cd4-b058-7cb6e876ad03 req-c8b82501-12ff-4217-97f8-2bc04210d139 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] No waiting events found dispatching network-vif-plugged-2573d470-4c75-40c7-9e9b-6130f5e14092 
{{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.948478] env[70020]: WARNING nova.compute.manager [req-adce8ae7-d3e1-4cd4-b058-7cb6e876ad03 req-c8b82501-12ff-4217-97f8-2bc04210d139 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received unexpected event network-vif-plugged-2573d470-4c75-40c7-9e9b-6130f5e14092 for instance with vm_state building and task_state spawning. [ 1097.015751] env[70020]: DEBUG oslo_vmware.api [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618953, 'name': PowerOnVM_Task, 'duration_secs': 0.525796} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.016176] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.016479] env[70020]: INFO nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Took 9.42 seconds to spawn the instance on the hypervisor. [ 1097.016724] env[70020]: DEBUG nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.017643] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d2ad1b-e035-4bc7-a6c7-2b1b212e1580 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.039562] env[70020]: DEBUG nova.network.neutron [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Successfully updated port: 2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1097.096025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a4547a75-8fb0-4460-be2b-324223a0ea7c tempest-ServersNegativeTestJSON-128962351 tempest-ServersNegativeTestJSON-128962351-project-member] Lock "2198e7f8-5458-4b97-abb3-0a3c932cebc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.932s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.140099] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145123} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.142652] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.142839] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.143027] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.301310] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872347b4-98ad-4892-b940-41aab8753183 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.310010] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd512694-8992-4d2d-ae29-a5af0b9ad7ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.342546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c1eb15-0387-44ec-9242-74ff6956662c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.351608] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bab455-12b0-40b4-94ef-78ddfff4f27e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.367588] env[70020]: DEBUG nova.compute.provider_tree [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.538019] env[70020]: INFO nova.compute.manager [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Took 31.96 seconds to build instance. 
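The Rename_Task, PowerOnVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vCenter task method, gets back a task reference, and wait_for_task polls it (the recurring "progress is N%" lines) until vCenter reports success or error. A minimal sketch of that pattern, with placeholder host, credentials and VM reference rather than anything taken from this log, and not Nova's actual driver code:

    # Sketch only: illustrates the submit-then-poll pattern behind the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # lines above. Host, credentials and vm_ref are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',          # placeholder endpoint/credentials
        api_retry_count=10, task_poll_interval=0.5)   # poll interval drives the progress log lines

    vm_ref = ...  # placeholder: a VirtualMachine managed-object reference obtained elsewhere

    # invoke_api issues the SOAP call (here PowerOnVM_Task) and returns a task reference;
    # wait_for_task then polls that task until it completes or raises on error.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task_ref)
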
[ 1097.545033] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.546129] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.546129] env[70020]: DEBUG nova.network.neutron [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.871224] env[70020]: DEBUG nova.scheduler.client.report [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.040042] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0f5fbf66-651b-4101-b794-9191f00b2a99 tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.472s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.083857] env[70020]: DEBUG nova.network.neutron [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1098.186739] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.186739] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.186739] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.186739] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.186739] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.186926] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.186956] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.187175] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.187282] env[70020]: 
DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.187460] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.187660] env[70020]: DEBUG nova.virt.hardware [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.188569] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f018ea5a-11d3-4954-bc4c-651203c1dbf8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.198137] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4f1861-9105-46b0-b282-cc3c3693fa59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.212395] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:b7:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36abe182-e89a-4325-9d00-ce204c53a359', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.220095] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.222595] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.222827] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0532068-4e30-46a0-b7d2-492fc7f79a5f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.247025] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.247025] env[70020]: value = "task-3618957" [ 1098.247025] env[70020]: _type = "Task" [ 1098.247025] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.253574] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618957, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.276256] env[70020]: DEBUG nova.network.neutron [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.376349] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.378744] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.371s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.378973] env[70020]: DEBUG nova.objects.instance [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lazy-loading 'resources' on Instance uuid 1ddd5a29-075b-482a-a6e9-4c7345673a00 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.400528] env[70020]: INFO nova.scheduler.client.report [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Deleted allocations for instance a39731d2-0b9b-41fa-b9ac-f80193a26d20 [ 1098.758995] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618957, 'name': CreateVM_Task, 'duration_secs': 0.484673} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.759206] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.759937] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.760118] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.760445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1098.760716] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cd2df9e-4339-4997-8a86-f96c48a48350 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.766609] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1098.766609] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526d6afa-886a-c04e-da8c-674c282601ec" [ 1098.766609] env[70020]: _type = "Task" [ 1098.766609] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.777051] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526d6afa-886a-c04e-da8c-674c282601ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.784155] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.784155] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Instance network_info: |[{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1098.784155] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:56:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2573d470-4c75-40c7-9e9b-6130f5e14092', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.792307] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.792552] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.792771] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70abd3df-44f8-4052-bf51-491d5bafe536 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.816042] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.816042] env[70020]: value = "task-3618958" [ 1098.816042] env[70020]: _type = "Task" [ 1098.816042] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.826164] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618958, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.911625] env[70020]: DEBUG oslo_concurrency.lockutils [None req-49947e67-928d-488e-af87-c580e4249f8b tempest-ServerRescueTestJSON-1890363448 tempest-ServerRescueTestJSON-1890363448-project-member] Lock "a39731d2-0b9b-41fa-b9ac-f80193a26d20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.618s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.992806] env[70020]: DEBUG nova.compute.manager [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.993024] env[70020]: DEBUG nova.compute.manager [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing instance network info cache due to event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1098.993386] env[70020]: DEBUG oslo_concurrency.lockutils [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.993537] env[70020]: DEBUG oslo_concurrency.lockutils [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.993700] env[70020]: DEBUG nova.network.neutron [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.173575] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b685452f-fa22-4ce3-9197-bf502f4e9a88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.183931] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b467240f-03bd-46fe-9886-f2ebff9fffba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.224647] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73995b75-4b43-45e9-a955-313f9727181e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.234779] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4e9f21-880e-47d0-af5d-7a9c42ccd3b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.252074] env[70020]: DEBUG nova.compute.provider_tree [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.283687] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526d6afa-886a-c04e-da8c-674c282601ec, 'name': SearchDatastore_Task, 'duration_secs': 0.013756} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.284228] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.284671] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.285137] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.285549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.286154] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.286624] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1e3ba7f-c047-4734-8118-b1330b01f601 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.311022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.311022] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.311022] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fd424c4-ca0c-4c5a-a953-d9467dfac579 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.320018] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1099.320018] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525b9ecf-ef35-474b-87a2-7aa0e9fd68bb" [ 1099.320018] env[70020]: _type = "Task" [ 1099.320018] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.331900] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525b9ecf-ef35-474b-87a2-7aa0e9fd68bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.337068] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618958, 'name': CreateVM_Task, 'duration_secs': 0.367614} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.338239] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1099.338550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.338904] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.340247] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1099.340247] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01389c67-e04a-4bf8-b9c4-491241f7ec61 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.347529] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 
tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1099.347529] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52cbc3de-2975-1dcd-84ae-a8aa17ac277f" [ 1099.347529] env[70020]: _type = "Task" [ 1099.347529] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.360048] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cbc3de-2975-1dcd-84ae-a8aa17ac277f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.755853] env[70020]: DEBUG nova.scheduler.client.report [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.827321] env[70020]: DEBUG nova.network.neutron [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updated VIF entry in instance network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.827720] env[70020]: DEBUG nova.network.neutron [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.835952] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525b9ecf-ef35-474b-87a2-7aa0e9fd68bb, 'name': SearchDatastore_Task, 'duration_secs': 0.030146} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.836893] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2a88dc-6a3b-4cce-be55-9840c87e7f28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.843291] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1099.843291] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52961c16-8b0f-1240-caff-b5eaf11ff79b" [ 1099.843291] env[70020]: _type = "Task" [ 1099.843291] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.854570] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52961c16-8b0f-1240-caff-b5eaf11ff79b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.860357] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52cbc3de-2975-1dcd-84ae-a8aa17ac277f, 'name': SearchDatastore_Task, 'duration_secs': 0.029053} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.860690] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.860970] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.861245] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.211295] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "db24c4e0-f778-4488-b9cb-a06b21932b4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.211569] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.212040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "db24c4e0-f778-4488-b9cb-a06b21932b4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.212040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.212169] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.214244] env[70020]: INFO nova.compute.manager [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Terminating instance [ 1100.261906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.263886] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.832s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.265494] env[70020]: INFO nova.compute.claims [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.285051] env[70020]: INFO nova.scheduler.client.report [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Deleted allocations for instance 1ddd5a29-075b-482a-a6e9-4c7345673a00 [ 1100.330442] env[70020]: DEBUG oslo_concurrency.lockutils [req-3d607ea2-548d-451d-a5cc-ba6077b048aa req-a76b89ec-97f7-4735-90c3-48e4f88cc057 service nova] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.353791] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52961c16-8b0f-1240-caff-b5eaf11ff79b, 'name': SearchDatastore_Task, 'duration_secs': 0.011038} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.354070] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.354327] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.354602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.354782] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1100.354994] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1a01ec3-eb0c-4298-b11c-f68374631923 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.356909] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dad94474-0501-43ec-8320-a1fdbe13dd8d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.366994] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1100.366994] env[70020]: value = "task-3618959" [ 1100.366994] env[70020]: _type = "Task" [ 1100.366994] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.368230] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1100.368566] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1100.372097] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4c96fa9-4ce4-4dd5-9525-7841f898cd6d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.378568] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1100.378568] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52efa67f-24ca-15c4-091b-058ded4b5185" [ 1100.378568] env[70020]: _type = "Task" [ 1100.378568] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.381679] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.391017] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52efa67f-24ca-15c4-091b-058ded4b5185, 'name': SearchDatastore_Task, 'duration_secs': 0.010768} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.391797] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa647445-a783-4d7b-a8f3-cdad0b9e13a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.398251] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1100.398251] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5205e5ca-f872-6df7-d625-935de6f3398f" [ 1100.398251] env[70020]: _type = "Task" [ 1100.398251] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.408035] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5205e5ca-f872-6df7-d625-935de6f3398f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.717944] env[70020]: DEBUG nova.compute.manager [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1100.718201] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1100.719120] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c357905f-ae63-415b-825f-745c4d446740 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.728516] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.728832] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdd46715-cbd4-46ae-bb47-6ca05f2fbb3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.743779] env[70020]: DEBUG oslo_vmware.api [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1100.743779] env[70020]: value = "task-3618960" [ 1100.743779] env[70020]: _type = "Task" [ 1100.743779] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.755101] env[70020]: DEBUG oslo_vmware.api [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.794519] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3364ee11-96ea-4a97-94ce-a9cc5c983de4 tempest-ServerShowV257Test-557408790 tempest-ServerShowV257Test-557408790-project-member] Lock "1ddd5a29-075b-482a-a6e9-4c7345673a00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.033s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.880076] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618959, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.910543] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5205e5ca-f872-6df7-d625-935de6f3398f, 'name': SearchDatastore_Task, 'duration_secs': 0.012059} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.913141] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.913141] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/9e7bd10b-3a78-48d8-9b66-e3646635be6d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.913141] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b487081-945a-465f-b218-fb09ee6d2936 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.923033] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1100.923033] env[70020]: value = "task-3618961" [ 1100.923033] env[70020]: _type = "Task" [ 1100.923033] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.937642] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618961, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.256321] env[70020]: DEBUG oslo_vmware.api [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618960, 'name': PowerOffVM_Task, 'duration_secs': 0.245908} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.257207] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.257207] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1101.257207] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4c7e545-7efc-4f79-b56c-4d4ff36ce8cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.330563] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.330802] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.330983] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Deleting the datastore file [datastore1] db24c4e0-f778-4488-b9cb-a06b21932b4e {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.331266] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56a78a45-89ed-4e06-a135-1e12fd6f25fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.339962] env[70020]: DEBUG oslo_vmware.api [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for the task: (returnval){ [ 1101.339962] env[70020]: value = "task-3618963" [ 1101.339962] env[70020]: _type = "Task" [ 1101.339962] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.351857] env[70020]: DEBUG oslo_vmware.api [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618963, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.386375] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525105} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.386671] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.386886] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.387153] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c18e51b-3259-4b76-bd69-1b8e064bcae6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.394639] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1101.394639] env[70020]: value = "task-3618964" [ 1101.394639] env[70020]: _type = "Task" [ 1101.394639] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.405355] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.432339] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618961, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453489} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.435035] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/9e7bd10b-3a78-48d8-9b66-e3646635be6d.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.435318] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.435761] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27d226cd-273d-4e95-a278-13ec1369434e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.445238] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1101.445238] env[70020]: value = "task-3618965" [ 1101.445238] env[70020]: _type = "Task" [ 1101.445238] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.459969] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.507505] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad1c6cc-8c16-4857-8024-3c701173a15f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.515161] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c02000-f43d-4f67-bc16-1f672bd872ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.546210] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67d814a-a0c4-49be-84fd-d1074d344c93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.553221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01572955-c25c-4532-8b1f-535294d93827 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.566437] env[70020]: DEBUG nova.compute.provider_tree [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.857039] env[70020]: DEBUG oslo_vmware.api [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Task: {'id': task-3618963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157443} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.857039] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1101.857039] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1101.857221] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1101.857358] env[70020]: INFO nova.compute.manager [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1101.857695] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1101.857969] env[70020]: DEBUG nova.compute.manager [-] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1101.858126] env[70020]: DEBUG nova.network.neutron [-] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1101.907252] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063623} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.907392] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.908464] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb79341-3afb-46e6-a3a4-df09272e5201 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.944567] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.945030] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-022a22b8-b71a-4d2f-b78a-59cd1b379af3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.975079] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071226} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.975820] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.976638] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375f2f89-452c-4388-bd0a-11e4d1efbcce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.979957] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1101.979957] env[70020]: value = "task-3618966" [ 1101.979957] env[70020]: _type = "Task" [ 1101.979957] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.000780] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/9e7bd10b-3a78-48d8-9b66-e3646635be6d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.001679] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52d5fb41-1696-4499-963f-37200699cd67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.020028] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.026277] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1102.026277] env[70020]: value = "task-3618967" [ 1102.026277] env[70020]: _type = "Task" [ 1102.026277] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.035859] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618967, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.072021] env[70020]: DEBUG nova.scheduler.client.report [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1102.106329] env[70020]: DEBUG nova.compute.manager [req-2758929c-2ee1-4c73-b1f8-f4294eb6b423 req-cf0ce150-0b35-4ba0-9285-8283b9decc0b service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Received event network-vif-deleted-54da9b83-48b2-4a5a-b141-d4c02fe9fdb8 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.106455] env[70020]: INFO nova.compute.manager [req-2758929c-2ee1-4c73-b1f8-f4294eb6b423 req-cf0ce150-0b35-4ba0-9285-8283b9decc0b service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Neutron deleted interface 54da9b83-48b2-4a5a-b141-d4c02fe9fdb8; detaching it from the instance and deleting it from the info cache [ 1102.106624] env[70020]: DEBUG nova.network.neutron [req-2758929c-2ee1-4c73-b1f8-f4294eb6b423 req-cf0ce150-0b35-4ba0-9285-8283b9decc0b service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.490986] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618966, 'name': ReconfigVM_Task, 'duration_secs': 0.312921} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.491377] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Reconfigured VM instance instance-00000066 to attach disk [datastore2] f1a09304-7725-489a-8669-322a51c709e5/f1a09304-7725-489a-8669-322a51c709e5.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.491894] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27ccc827-c791-4a5b-95fa-36bea573da74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.497998] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1102.497998] env[70020]: value = "task-3618968" [ 1102.497998] env[70020]: _type = "Task" [ 1102.497998] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.505369] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618968, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.535186] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618967, 'name': ReconfigVM_Task, 'duration_secs': 0.292032} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.535498] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/9e7bd10b-3a78-48d8-9b66-e3646635be6d.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.536160] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d647b054-0e7b-4a08-a24f-297ceb55e84d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.543108] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1102.543108] env[70020]: value = "task-3618969" [ 1102.543108] env[70020]: _type = "Task" [ 1102.543108] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.550992] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618969, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.575018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.575619] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1102.578509] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.713s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.578628] env[70020]: DEBUG nova.objects.instance [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'resources' on Instance uuid 9d1568bf-4027-4d4c-b089-276006eee715 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.587124] env[70020]: DEBUG nova.network.neutron [-] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.609579] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bc17e07-6683-45c7-9824-aba84d3b3762 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.621266] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e149de9-193c-44b4-a893-6b0715532b9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.656494] env[70020]: DEBUG nova.compute.manager [req-2758929c-2ee1-4c73-b1f8-f4294eb6b423 req-cf0ce150-0b35-4ba0-9285-8283b9decc0b service nova] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Detach interface failed, port_id=54da9b83-48b2-4a5a-b141-d4c02fe9fdb8, reason: Instance db24c4e0-f778-4488-b9cb-a06b21932b4e could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1103.008275] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618968, 'name': Rename_Task, 'duration_secs': 0.155815} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.008547] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.008781] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-282eda63-2e35-434b-99e7-fd593f7a3c04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.015619] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1103.015619] env[70020]: value = "task-3618970" [ 1103.015619] env[70020]: _type = "Task" [ 1103.015619] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.025723] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.053301] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618969, 'name': Rename_Task, 'duration_secs': 0.148256} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.053657] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.053932] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b55b71fa-4f19-4439-a6ef-9f0a03120a40 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.059419] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1103.059419] env[70020]: value = "task-3618971" [ 1103.059419] env[70020]: _type = "Task" [ 1103.059419] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.066419] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.081969] env[70020]: DEBUG nova.compute.utils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1103.086148] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1103.086303] env[70020]: DEBUG nova.network.neutron [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1103.089363] env[70020]: INFO nova.compute.manager [-] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Took 1.23 seconds to deallocate network for instance. [ 1103.136212] env[70020]: DEBUG nova.policy [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33279b0a8dc848ceb443776f840845c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16f59a8f930846ec9299416b9ec5dd48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1103.297218] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fa81c9-6644-4dce-95a2-5a20a576c34c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.305322] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49193f8c-f68e-4e22-8335-ca2a0dd5d238 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.337160] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52e6928-9396-458c-9dd3-0fafe74dec79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.344443] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7212dfdc-618d-4a66-bce4-3b95b1de3555 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.358528] env[70020]: DEBUG nova.compute.provider_tree [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.422186] env[70020]: DEBUG nova.network.neutron [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Successfully created port: 8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1103.526620] env[70020]: DEBUG oslo_vmware.api [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618970, 'name': PowerOnVM_Task, 'duration_secs': 0.479365} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.526968] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.527371] env[70020]: DEBUG nova.compute.manager [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.528286] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ed2077-2405-4776-8635-eb56237ab648 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.574351] env[70020]: DEBUG oslo_vmware.api [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3618971, 'name': PowerOnVM_Task, 'duration_secs': 0.481781} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.575313] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.575533] env[70020]: INFO nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Took 6.84 seconds to spawn the instance on the hypervisor. [ 1103.575689] env[70020]: DEBUG nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.576505] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5159b3-1cb4-4ddd-bf46-4001ea66ae39 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.586526] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1103.596037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.862818] env[70020]: DEBUG nova.scheduler.client.report [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1104.045918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.100450] env[70020]: INFO nova.compute.manager [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Took 31.86 seconds to build instance. 
[ 1104.367431] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.370335] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.279s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.391174] env[70020]: INFO nova.scheduler.client.report [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance 9d1568bf-4027-4d4c-b089-276006eee715 [ 1104.524808] env[70020]: DEBUG nova.compute.manager [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1104.525009] env[70020]: DEBUG nova.compute.manager [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing instance network info cache due to event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1104.525590] env[70020]: DEBUG oslo_concurrency.lockutils [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.526143] env[70020]: DEBUG oslo_concurrency.lockutils [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.526143] env[70020]: DEBUG nova.network.neutron [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.599891] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1104.602870] env[70020]: DEBUG oslo_concurrency.lockutils [None req-938020f6-2bae-43f8-88a3-b7caad11707a tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.372s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.632123] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.632370] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.632528] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.632709] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.632851] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.632997] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.633220] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.633381] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.633548] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.633707] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.633878] env[70020]: DEBUG nova.virt.hardware [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.634833] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c147c8da-416c-4934-991c-3743ae1efb86 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.642653] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044e53ea-7652-4111-a396-dbd811a86ca1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.900017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-298e0ede-04d2-47dd-ac28-8e14e0155af4 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "9d1568bf-4027-4d4c-b089-276006eee715" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.533s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.932383] env[70020]: DEBUG nova.network.neutron [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Successfully updated port: 8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1105.096220] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f773bd-03ce-4e5e-aa7f-fd8cdd1c0dd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.104876] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a2a726-8744-4597-8c9c-4a8eeb870f1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.138516] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6aeb38a3-bf69-4e91-b387-f837548709c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.146077] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877dd2a5-e8be-4315-a193-e5f2cfce88be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.161021] env[70020]: DEBUG nova.compute.provider_tree [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.250937] env[70020]: DEBUG nova.network.neutron [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updated VIF entry in instance network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1105.251331] env[70020]: DEBUG nova.network.neutron [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.358085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "f1a09304-7725-489a-8669-322a51c709e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.358361] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "f1a09304-7725-489a-8669-322a51c709e5" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.358576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "f1a09304-7725-489a-8669-322a51c709e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.358761] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "f1a09304-7725-489a-8669-322a51c709e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.359037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "f1a09304-7725-489a-8669-322a51c709e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.361444] env[70020]: INFO nova.compute.manager [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Terminating instance [ 1105.367221] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "0453722d-258f-49e3-b61e-f1081eb465c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.367536] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "0453722d-258f-49e3-b61e-f1081eb465c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.367814] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "0453722d-258f-49e3-b61e-f1081eb465c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.368027] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "0453722d-258f-49e3-b61e-f1081eb465c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.368200] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "0453722d-258f-49e3-b61e-f1081eb465c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.370282] env[70020]: INFO nova.compute.manager [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Terminating instance [ 1105.434914] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.435058] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.435254] env[70020]: DEBUG nova.network.neutron [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.666579] env[70020]: DEBUG nova.scheduler.client.report [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.753989] env[70020]: DEBUG oslo_concurrency.lockutils [req-016cbac7-5a6e-43e9-8c78-1c833961da09 req-9bc0f4c7-873b-47e2-8e4b-abdf04e8e25e service nova] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.865800] env[70020]: DEBUG nova.compute.manager [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.866079] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.867182] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3236171-5667-4431-bc36-da37083b9263 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.876933] env[70020]: DEBUG nova.compute.manager [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.877220] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.877629] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.878650] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68290dc6-64c2-45d9-bd64-6255084e0cb9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.882269] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40fe6784-4a15-4953-bd0d-5918d35a9402 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.888324] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.889400] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40ec1bbe-8860-401d-be02-6367a75bc73d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.890727] env[70020]: DEBUG oslo_vmware.api [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1105.890727] env[70020]: value = "task-3618972" [ 1105.890727] env[70020]: _type = "Task" [ 1105.890727] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.895732] env[70020]: DEBUG oslo_vmware.api [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1105.895732] env[70020]: value = "task-3618973" [ 1105.895732] env[70020]: _type = "Task" [ 1105.895732] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.898682] env[70020]: DEBUG oslo_vmware.api [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.906746] env[70020]: DEBUG oslo_vmware.api [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.928024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.928257] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.978277] env[70020]: DEBUG nova.network.neutron [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1106.160753] env[70020]: DEBUG nova.network.neutron [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updating instance_info_cache with network_info: [{"id": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "address": "fa:16:3e:70:3d:f6", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cef7e32-bd", "ovs_interfaceid": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.400947] env[70020]: DEBUG oslo_vmware.api [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618972, 'name': PowerOffVM_Task, 'duration_secs': 0.267125} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.403980] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.404264] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.404429] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fabbc0b-500b-427e-8e42-79d30651b251 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.411900] env[70020]: DEBUG oslo_vmware.api [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618973, 'name': PowerOffVM_Task, 'duration_secs': 0.198809} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.412164] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.412329] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.412551] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-809474e1-7f4b-4e4a-bb94-1086637343ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.430324] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1106.485498] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.485498] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.485978] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore2] 0453722d-258f-49e3-b61e-f1081eb465c6 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.485978] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-032ce93e-8237-49a2-b8d2-21d80eeacb97 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.494350] env[70020]: DEBUG oslo_vmware.api [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1106.494350] env[70020]: value = "task-3618976" [ 1106.494350] env[70020]: _type = "Task" [ 1106.494350] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.498681] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.498875] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.499064] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file [datastore2] f1a09304-7725-489a-8669-322a51c709e5 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.499653] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-594ed010-c239-461f-992f-e66893f73aaf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.504855] env[70020]: DEBUG oslo_vmware.api [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618976, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.508835] env[70020]: DEBUG oslo_vmware.api [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1106.508835] env[70020]: value = "task-3618977" [ 1106.508835] env[70020]: _type = "Task" [ 1106.508835] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.516739] env[70020]: DEBUG oslo_vmware.api [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.552439] env[70020]: DEBUG nova.compute.manager [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Received event network-vif-plugged-8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.552439] env[70020]: DEBUG oslo_concurrency.lockutils [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.552439] env[70020]: DEBUG oslo_concurrency.lockutils [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.552439] env[70020]: DEBUG oslo_concurrency.lockutils [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.552439] env[70020]: DEBUG nova.compute.manager [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] No waiting events found dispatching network-vif-plugged-8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1106.552439] env[70020]: WARNING nova.compute.manager [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Received unexpected event network-vif-plugged-8cef7e32-bdf1-41df-be67-ab80c6f894de for instance with vm_state building and task_state spawning. [ 1106.552439] env[70020]: DEBUG nova.compute.manager [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Received event network-changed-8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.552718] env[70020]: DEBUG nova.compute.manager [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Refreshing instance network info cache due to event network-changed-8cef7e32-bdf1-41df-be67-ab80c6f894de. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1106.552718] env[70020]: DEBUG oslo_concurrency.lockutils [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] Acquiring lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.663111] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.663468] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Instance network_info: |[{"id": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "address": "fa:16:3e:70:3d:f6", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cef7e32-bd", "ovs_interfaceid": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1106.663821] env[70020]: DEBUG oslo_concurrency.lockutils [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] Acquired lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.664056] env[70020]: DEBUG nova.network.neutron [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Refreshing network info cache for port 8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.665450] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:3d:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8cef7e32-bdf1-41df-be67-ab80c6f894de', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1106.673626] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.674656] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1106.674887] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd00b723-329f-4901-acc3-4a742869bef0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.690069] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.320s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.693198] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.842s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.693392] env[70020]: DEBUG nova.objects.instance [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'resources' on Instance uuid e5c6ad2e-9925-4234-a7da-ea2618b7c7d5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.698462] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1106.698462] env[70020]: value = "task-3618978" [ 1106.698462] env[70020]: _type = "Task" [ 1106.698462] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.706606] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618978, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.951869] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.004781] env[70020]: DEBUG oslo_vmware.api [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3618976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152163} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.005468] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.005584] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1107.005734] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1107.005874] env[70020]: INFO nova.compute.manager [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1107.006957] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.006957] env[70020]: DEBUG nova.compute.manager [-] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1107.006957] env[70020]: DEBUG nova.network.neutron [-] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1107.017885] env[70020]: DEBUG oslo_vmware.api [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164357} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.017885] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.018075] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1107.018116] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1107.018267] env[70020]: INFO nova.compute.manager [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1107.018492] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.018675] env[70020]: DEBUG nova.compute.manager [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1107.018774] env[70020]: DEBUG nova.network.neutron [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1107.214121] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618978, 'name': CreateVM_Task} progress is 25%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.264296] env[70020]: INFO nova.scheduler.client.report [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted allocation for migration 1a2443fb-c003-49f2-8631-b910a7bd8e63 [ 1107.433511] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-c972e083-8c91-4875-a8c6-8257b06c93a1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.433787] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-c972e083-8c91-4875-a8c6-8257b06c93a1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.434161] env[70020]: DEBUG nova.objects.instance [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'flavor' on Instance uuid c972e083-8c91-4875-a8c6-8257b06c93a1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.440543] env[70020]: DEBUG nova.network.neutron [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updated VIF entry in instance network info cache for port 8cef7e32-bdf1-41df-be67-ab80c6f894de. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1107.440543] env[70020]: DEBUG nova.network.neutron [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updating instance_info_cache with network_info: [{"id": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "address": "fa:16:3e:70:3d:f6", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cef7e32-bd", "ovs_interfaceid": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.452383] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f706ec-3927-44ba-b012-e7b71c070a1b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.460951] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ece0e93-7c3c-42e2-bff4-0f34d2e7da5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.491578] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a760d2-3d34-440c-a6f4-8b728e059236 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.499086] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e33056-58eb-4c01-80be-1ee80b340080 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.512335] env[70020]: DEBUG nova.compute.provider_tree [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.669790] env[70020]: DEBUG nova.compute.manager [req-4475c5ea-c195-4f90-847c-17f774be4a5a req-6c99d45b-7b6e-413e-8065-b4558ad47b4a service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Received event network-vif-deleted-36abe182-e89a-4325-9d00-ce204c53a359 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.669910] env[70020]: INFO nova.compute.manager [req-4475c5ea-c195-4f90-847c-17f774be4a5a 
req-6c99d45b-7b6e-413e-8065-b4558ad47b4a service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Neutron deleted interface 36abe182-e89a-4325-9d00-ce204c53a359; detaching it from the instance and deleting it from the info cache [ 1107.670108] env[70020]: DEBUG nova.network.neutron [req-4475c5ea-c195-4f90-847c-17f774be4a5a req-6c99d45b-7b6e-413e-8065-b4558ad47b4a service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.711962] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618978, 'name': CreateVM_Task, 'duration_secs': 0.723552} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.714031] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1107.714031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.714031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.714031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1107.714245] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4322d02a-9541-434b-9fc3-884f59e979fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.718383] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1107.718383] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ce72f2-aaa4-627c-60d0-38b08bacd5fc" [ 1107.718383] env[70020]: _type = "Task" [ 1107.718383] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.727205] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ce72f2-aaa4-627c-60d0-38b08bacd5fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.750334] env[70020]: DEBUG nova.network.neutron [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.776767] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f8ed5a56-f322-4e4a-afe3-91d1ddb14bfe tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.607s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.781249] env[70020]: DEBUG nova.network.neutron [-] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.838053] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.838417] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.942281] env[70020]: DEBUG oslo_concurrency.lockutils [req-cf2cc43f-d9a6-467b-97b1-fc0b764ac023 req-9ded5850-99c8-4143-8ff6-3ce9980b79c6 service nova] Releasing lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.009124] env[70020]: DEBUG nova.objects.instance [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'pci_requests' on Instance uuid c972e083-8c91-4875-a8c6-8257b06c93a1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.015329] env[70020]: DEBUG nova.scheduler.client.report [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.173062] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e425a8c-89c4-4b32-8ed9-6275f43442bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.183100] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43411a74-6a78-4214-9888-813c8f45bfe3 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.216748] env[70020]: DEBUG nova.compute.manager [req-4475c5ea-c195-4f90-847c-17f774be4a5a req-6c99d45b-7b6e-413e-8065-b4558ad47b4a service nova] [instance: f1a09304-7725-489a-8669-322a51c709e5] Detach interface failed, port_id=36abe182-e89a-4325-9d00-ce204c53a359, reason: Instance f1a09304-7725-489a-8669-322a51c709e5 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1108.228150] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ce72f2-aaa4-627c-60d0-38b08bacd5fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011205} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.228430] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.228658] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.228893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.229050] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.229234] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.229480] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d087e9aa-0826-4cbf-8471-83fc880d86ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.237719] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.237890] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.238596] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cf4fb3a-380c-449d-86f5-5d548f60af2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.243365] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1108.243365] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a5f726-3ec7-0a93-daff-d53d8097f878" [ 1108.243365] env[70020]: _type = "Task" [ 1108.243365] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.250782] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a5f726-3ec7-0a93-daff-d53d8097f878, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.252242] env[70020]: INFO nova.compute.manager [-] [instance: f1a09304-7725-489a-8669-322a51c709e5] Took 1.23 seconds to deallocate network for instance. [ 1108.284199] env[70020]: INFO nova.compute.manager [-] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Took 1.28 seconds to deallocate network for instance. 
[ 1108.344842] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.345034] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.345267] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.345455] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.345605] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.345753] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.345882] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1108.346371] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.511743] env[70020]: DEBUG nova.objects.base [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1108.511807] env[70020]: DEBUG nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1108.519676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.826s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.522493] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.017s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.522635] env[70020]: DEBUG nova.objects.instance [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lazy-loading 'resources' on Instance uuid 3163a070-a0db-4a41-af32-dfbe7a1766ac {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.542212] env[70020]: INFO nova.scheduler.client.report [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocations for instance e5c6ad2e-9925-4234-a7da-ea2618b7c7d5 [ 1108.551306] env[70020]: DEBUG nova.policy [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1108.576566] env[70020]: DEBUG nova.compute.manager [req-e6db894c-0a6c-49d0-9155-06454d752f0f req-8b4da31d-834d-4ec1-b6c4-8a4bc0706bff service nova] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Received event 
network-vif-deleted-d9bd6893-0205-4ae9-9f12-07dbc827824e {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.699065] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.699442] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.699746] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.700015] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.700278] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.702912] env[70020]: INFO nova.compute.manager [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Terminating instance [ 1108.754663] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a5f726-3ec7-0a93-daff-d53d8097f878, 'name': SearchDatastore_Task, 'duration_secs': 0.008829} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.755487] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-728755a8-9f5d-4920-9c3e-a4f880fd29b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.758454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.761676] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1108.761676] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520012d2-61b8-f01d-9a0c-1ad8102b8a48" [ 1108.761676] env[70020]: _type = "Task" [ 1108.761676] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.769400] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520012d2-61b8-f01d-9a0c-1ad8102b8a48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.790115] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.827431] env[70020]: DEBUG nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Successfully created port: 5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1108.849128] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.052129] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b38451ea-4e47-44d0-b69e-c3b331736001 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "e5c6ad2e-9925-4234-a7da-ea2618b7c7d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.136s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.203763] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eab993d-4107-4867-a046-1098a07754d8 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.207109] env[70020]: DEBUG nova.compute.manager [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1109.207335] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.208081] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ae5a0d-53e5-46fd-8641-acf1dc116468 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.217297] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dabe8d-a21e-4379-9527-0a124da5d60f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.220555] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.220783] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-190f0ebf-98d1-48c1-88b3-c21b8774ed7d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.253976] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e092353e-ff32-41fe-a76c-1a6b4f726d04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.256630] env[70020]: DEBUG oslo_vmware.api [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1109.256630] env[70020]: value = "task-3618979" [ 1109.256630] env[70020]: _type = "Task" [ 1109.256630] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.265885] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb74756-0cb8-4502-9cb6-8ebfd85d902d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.272261] env[70020]: DEBUG oslo_vmware.api [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618979, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.278022] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520012d2-61b8-f01d-9a0c-1ad8102b8a48, 'name': SearchDatastore_Task, 'duration_secs': 0.010138} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.285654] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.285947] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f9d4837f-0e3f-4a83-9055-04d17ef3eb23/f9d4837f-0e3f-4a83-9055-04d17ef3eb23.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1109.286657] env[70020]: DEBUG nova.compute.provider_tree [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.287974] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f59cffc9-7b93-4f2d-945a-9eee728a0ef5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.293945] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1109.293945] env[70020]: value = "task-3618980" [ 1109.293945] env[70020]: _type = "Task" [ 1109.293945] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.303749] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.766601] env[70020]: DEBUG oslo_vmware.api [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618979, 'name': PowerOffVM_Task, 'duration_secs': 0.204271} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.766911] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.766985] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1109.767250] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6688f5e-0b73-4d99-a952-46dd183c40da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.791744] env[70020]: DEBUG nova.scheduler.client.report [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.803117] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448017} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.803826] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] f9d4837f-0e3f-4a83-9055-04d17ef3eb23/f9d4837f-0e3f-4a83-9055-04d17ef3eb23.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1109.803826] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1109.803826] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0be66422-dd14-41d7-888c-424db1db0363 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.810190] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1109.810190] env[70020]: value = "task-3618982" [ 1109.810190] env[70020]: _type = "Task" [ 1109.810190] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.819450] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618982, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.838392] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1109.838611] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1109.838838] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleting the datastore file [datastore1] 8adadb2e-2a20-45b1-bed8-34e09df25f39 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1109.839128] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1eedf826-6187-4e7d-bf93-b4d0aa83ee4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.845694] env[70020]: DEBUG oslo_vmware.api [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1109.845694] env[70020]: value = "task-3618983" [ 1109.845694] env[70020]: _type = "Task" [ 1109.845694] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.853851] env[70020]: DEBUG oslo_vmware.api [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618983, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.955984] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c0a78ace-307e-4156-beb3-a53061acff7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.956273] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.299125] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.777s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.301579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.706s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.301798] env[70020]: DEBUG nova.objects.instance [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lazy-loading 'resources' on Instance uuid db24c4e0-f778-4488-b9cb-a06b21932b4e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.307699] env[70020]: DEBUG nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Successfully updated port: 5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1110.321163] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066622} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.322214] env[70020]: INFO nova.scheduler.client.report [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Deleted allocations for instance 3163a070-a0db-4a41-af32-dfbe7a1766ac [ 1110.326896] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1110.327996] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff3ded7-3ba4-42ba-8514-0664a33a61a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.353589] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] f9d4837f-0e3f-4a83-9055-04d17ef3eb23/f9d4837f-0e3f-4a83-9055-04d17ef3eb23.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1110.354174] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c48e3349-af5f-468e-a814-19f180014683 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.378667] env[70020]: DEBUG oslo_vmware.api [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3618983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137254} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.379820] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.380024] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1110.380211] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1110.380384] env[70020]: INFO nova.compute.manager [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1110.380620] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.381140] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1110.381140] env[70020]: value = "task-3618984" [ 1110.381140] env[70020]: _type = "Task" [ 1110.381140] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.381332] env[70020]: DEBUG nova.compute.manager [-] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1110.381432] env[70020]: DEBUG nova.network.neutron [-] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1110.390822] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618984, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.458331] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1110.602444] env[70020]: DEBUG nova.compute.manager [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-vif-plugged-5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.602718] env[70020]: DEBUG oslo_concurrency.lockutils [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.602940] env[70020]: DEBUG oslo_concurrency.lockutils [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.603558] env[70020]: DEBUG oslo_concurrency.lockutils [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.603791] env[70020]: DEBUG nova.compute.manager [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] No waiting events found dispatching network-vif-plugged-5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1110.603948] env[70020]: WARNING nova.compute.manager [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received unexpected event network-vif-plugged-5380cda0-d51f-4970-a418-c89ed561db06 for instance with vm_state active and task_state None. [ 1110.604144] env[70020]: DEBUG nova.compute.manager [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-changed-5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.604315] env[70020]: DEBUG nova.compute.manager [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Refreshing instance network info cache due to event network-changed-5380cda0-d51f-4970-a418-c89ed561db06. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1110.604555] env[70020]: DEBUG oslo_concurrency.lockutils [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] Acquiring lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.604714] env[70020]: DEBUG oslo_concurrency.lockutils [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] Acquired lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.604881] env[70020]: DEBUG nova.network.neutron [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Refreshing network info cache for port 5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1110.811067] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.829802] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef6d8cc0-451b-4b46-bcaf-0eedc7adb4e2 tempest-ServerAddressesTestJSON-1180661610 tempest-ServerAddressesTestJSON-1180661610-project-member] Lock "3163a070-a0db-4a41-af32-dfbe7a1766ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.859s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.893067] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618984, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.975888] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.008502] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2282ed8-2c27-4df2-a47f-63b00456c5dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.016691] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a39dc68-1624-4fa8-abd8-1b322f550041 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.051294] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b892ca3-07d4-426e-86b7-c08cdda11b68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.059892] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d2e0a0-69f0-4089-89c3-a39bc72bac4b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.075735] env[70020]: DEBUG nova.compute.provider_tree [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.298161] env[70020]: DEBUG nova.network.neutron [-] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.399029] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618984, 'name': ReconfigVM_Task, 'duration_secs': 0.766044} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.399292] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Reconfigured VM instance instance-00000069 to attach disk [datastore1] f9d4837f-0e3f-4a83-9055-04d17ef3eb23/f9d4837f-0e3f-4a83-9055-04d17ef3eb23.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1111.399997] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bc660de-c208-48e4-971e-707c5fd6c556 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.406992] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1111.406992] env[70020]: value = "task-3618985" [ 1111.406992] env[70020]: _type = "Task" [ 1111.406992] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.414988] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618985, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.416137] env[70020]: DEBUG nova.network.neutron [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Added VIF to instance network info cache for port 5380cda0-d51f-4970-a418-c89ed561db06. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1111.416509] env[70020]: DEBUG nova.network.neutron [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5380cda0-d51f-4970-a418-c89ed561db06", "address": "fa:16:3e:09:fd:72", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5380cda0-d5", "ovs_interfaceid": "5380cda0-d51f-4970-a418-c89ed561db06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.585159] env[70020]: DEBUG oslo_concurrency.lockutils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.585424] env[70020]: DEBUG oslo_concurrency.lockutils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock 
"8dbb1de0-38de-493f-9512-b8754bab7bcb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.607216] env[70020]: ERROR nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] [req-38c0c76f-f0fb-4cd7-a3fd-1c07830ec590] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-38c0c76f-f0fb-4cd7-a3fd-1c07830ec590"}]} [ 1111.626574] env[70020]: DEBUG nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1111.644771] env[70020]: DEBUG nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1111.645041] env[70020]: DEBUG nova.compute.provider_tree [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.660171] env[70020]: DEBUG nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1111.678422] env[70020]: 
DEBUG nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1111.801387] env[70020]: INFO nova.compute.manager [-] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Took 1.42 seconds to deallocate network for instance. [ 1111.862584] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e326fbac-c7f8-42fc-a003-42d517d4bdad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.870832] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de6d382-0e78-433a-b8a2-a6b51a2992e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.903290] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d884c5-2556-4240-acea-06a58bcdfaf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.913239] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3280908-3cf9-4f54-bfa9-6a8ab84fda19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.918784] env[70020]: DEBUG oslo_concurrency.lockutils [req-f555a454-8b6f-4568-af94-e3fae98d7bc2 req-40454770-0b62-49d8-bc3c-69e4a7a8f118 service nova] Releasing lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.929276] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.929542] env[70020]: DEBUG nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.931532] env[70020]: DEBUG nova.compute.provider_tree [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.933837] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618985, 'name': Rename_Task, 'duration_secs': 0.144008} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.933837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.934074] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d75f81d7-b148-4a23-a53e-1ec104536c2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.940748] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1111.940748] env[70020]: value = "task-3618986" [ 1111.940748] env[70020]: _type = "Task" [ 1111.940748] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.949566] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.088088] env[70020]: DEBUG nova.compute.utils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1112.307517] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.451435] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618986, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.470595] env[70020]: DEBUG nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1112.470914] env[70020]: DEBUG nova.compute.provider_tree [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 144 to 145 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1112.471521] env[70020]: DEBUG nova.compute.provider_tree [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1112.482788] env[70020]: WARNING nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. ignoring it [ 1112.483017] env[70020]: WARNING nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. ignoring it [ 1112.483191] env[70020]: WARNING nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] 5380cda0-d51f-4970-a418-c89ed561db06 already exists in list: port_ids containing: ['5380cda0-d51f-4970-a418-c89ed561db06']. 
ignoring it [ 1112.591230] env[70020]: DEBUG oslo_concurrency.lockutils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.753386] env[70020]: DEBUG nova.network.neutron [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5380cda0-d51f-4970-a418-c89ed561db06", "address": "fa:16:3e:09:fd:72", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5380cda0-d5", "ovs_interfaceid": "5380cda0-d51f-4970-a418-c89ed561db06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.951740] env[70020]: DEBUG oslo_vmware.api [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3618986, 'name': 
PowerOnVM_Task, 'duration_secs': 0.562529} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.952299] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.952622] env[70020]: INFO nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Took 8.35 seconds to spawn the instance on the hypervisor. [ 1112.952863] env[70020]: DEBUG nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.953669] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64147133-b8d9-4115-8408-a8d7414f9146 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.975911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.674s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.977994] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 8.932s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.978193] env[70020]: DEBUG nova.objects.instance [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: f1a09304-7725-489a-8669-322a51c709e5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1112.997354] env[70020]: INFO nova.scheduler.client.report [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Deleted allocations for instance db24c4e0-f778-4488-b9cb-a06b21932b4e [ 1113.256756] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.257435] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.257621] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.258591] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cefaf9-7abc-462d-8809-126c3b95ffde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.279157] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.279410] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.279568] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.279748] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.279888] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.280041] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.280243] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.280400] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.280579] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.280738] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.280903] env[70020]: DEBUG nova.virt.hardware [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.287084] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Reconfiguring VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1113.288325] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2a3d693-8dd7-4ca0-8ec3-08310b3cc041 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.305800] env[70020]: DEBUG oslo_vmware.api [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1113.305800] env[70020]: value = "task-3618987" [ 1113.305800] env[70020]: _type = "Task" [ 1113.305800] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.314702] env[70020]: DEBUG oslo_vmware.api [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618987, 'name': ReconfigVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.470538] env[70020]: INFO nova.compute.manager [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Took 38.05 seconds to build instance. 
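
The entries above at 1111.607, 1111.626-1111.660 and 1112.470 show the usual Placement generation-conflict cycle: an inventory PUT is rejected with 409 "placement.concurrent_update", the report client re-reads the provider's inventories and generation (144), then retries and the generation advances to 145. The following is only an illustrative sketch of that refresh-and-retry pattern against the Placement HTTP API, not Nova's actual report-client code; the endpoint URL, token, and microversion header values are assumptions, while the payload shape and the 409 error code match the log.

    # Sketch: retry a Placement inventory PUT after a generation conflict.
    import requests

    PLACEMENT = "http://placement.example.invalid"   # assumed endpoint
    HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",        # assumed credentials
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid, inventories, retries=3):
        """PUT the inventory, refreshing the provider generation on 409."""
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        # Start from the generation Placement currently reports.
        gen = requests.get(url, headers=HEADERS).json()["resource_provider_generation"]
        for _ in range(retries):
            body = {"resource_provider_generation": gen, "inventories": inventories}
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code == 200:
                # Success: Placement returns the new generation (144 -> 145 above).
                return resp.json()["resource_provider_generation"]
            if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
                # Another writer bumped the generation; re-read it and retry.
                gen = requests.get(url, headers=HEADERS).json()["resource_provider_generation"]
                continue
            resp.raise_for_status()
        raise RuntimeError("gave up after repeated generation conflicts")

Called with an inventories dict shaped like the one in the log (e.g. {'VCPU': {'total': 48, 'max_unit': 16, 'allocation_ratio': 4.0, ...}, ...}), this reproduces the conflict/refresh/retry sequence that the resource tracker logs here.
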
[ 1113.506773] env[70020]: DEBUG oslo_concurrency.lockutils [None req-56a3bdbb-0445-46d7-9c23-8f032c05dcdb tempest-ServerMetadataTestJSON-787941395 tempest-ServerMetadataTestJSON-787941395-project-member] Lock "db24c4e0-f778-4488-b9cb-a06b21932b4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.295s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.596147] env[70020]: DEBUG nova.compute.manager [req-d81a838f-2968-48ca-bc2a-9cd412fc3b80 req-e6d8d869-8779-47aa-8c51-b3596dccdcc7 service nova] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Received event network-vif-deleted-4b681dd6-fab3-4812-988e-26b219b6c5c3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.789324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.789579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.789816] env[70020]: INFO nova.compute.manager [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Attaching volume 1acb6a62-8f9b-4b43-be82-4306b549a1ba to /dev/sdb [ 1113.820580] env[70020]: DEBUG oslo_vmware.api [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618987, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.827837] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d080dae0-8517-40d1-9090-4d9ca04afff8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.834948] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4576ad5-c88b-463c-8e7a-61684c1a1203 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.849533] env[70020]: DEBUG nova.virt.block_device [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating existing volume attachment record: a043a575-a719-4de8-aaf5-6a0f103a23bb {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1113.972829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9f52e54a-7b1a-436f-8391-67dce6cf6d39 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.564s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.987294] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1c32e25e-33ae-48a8-845c-0c31d6b55ef9 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.988595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.037s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.990122] env[70020]: INFO nova.compute.claims [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1114.323646] env[70020]: DEBUG oslo_vmware.api [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618987, 'name': ReconfigVM_Task, 'duration_secs': 0.699274} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.324293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.324542] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Reconfigured VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1114.829610] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4fc5394b-d85d-4b9c-ab65-0b6e51f82a0e tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-c972e083-8c91-4875-a8c6-8257b06c93a1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.396s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.164942] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4c4334-95d8-435d-b646-aad36029b750 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.172636] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba33352-6505-496b-94b4-4453ba7e35db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.203057] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd65d8c5-7a9f-4f8e-89ec-a497350a7f76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.212025] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd088b9-7875-44c5-ab22-706eb7d2b0d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.225376] env[70020]: DEBUG nova.compute.provider_tree [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.708553] env[70020]: DEBUG nova.compute.manager [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Received event network-changed-8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.708705] env[70020]: DEBUG nova.compute.manager [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Refreshing instance network info cache due to event network-changed-8cef7e32-bdf1-41df-be67-ab80c6f894de. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1115.708924] env[70020]: DEBUG oslo_concurrency.lockutils [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] Acquiring lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.709079] env[70020]: DEBUG oslo_concurrency.lockutils [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] Acquired lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.709729] env[70020]: DEBUG nova.network.neutron [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Refreshing network info cache for port 8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.728445] env[70020]: DEBUG nova.scheduler.client.report [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.236796] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.237313] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1116.242547] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.484s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.242804] env[70020]: DEBUG nova.objects.instance [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'resources' on Instance uuid f1a09304-7725-489a-8669-322a51c709e5 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.386915] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-c972e083-8c91-4875-a8c6-8257b06c93a1-5380cda0-d51f-4970-a418-c89ed561db06" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.387193] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-c972e083-8c91-4875-a8c6-8257b06c93a1-5380cda0-d51f-4970-a418-c89ed561db06" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.399903] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "899183eb-ba25-491f-b981-77a33239ed74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.400196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "899183eb-ba25-491f-b981-77a33239ed74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.438275] env[70020]: DEBUG nova.network.neutron [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updated VIF entry in instance network info cache for port 8cef7e32-bdf1-41df-be67-ab80c6f894de. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.438648] env[70020]: DEBUG nova.network.neutron [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updating instance_info_cache with network_info: [{"id": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "address": "fa:16:3e:70:3d:f6", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cef7e32-bd", "ovs_interfaceid": "8cef7e32-bdf1-41df-be67-ab80c6f894de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.615047] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "d0252c4e-0991-45b9-bf0b-b8e41093e518" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.615326] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "d0252c4e-0991-45b9-bf0b-b8e41093e518" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.748653] env[70020]: DEBUG nova.compute.utils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1116.750140] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1116.750311] env[70020]: DEBUG nova.network.neutron [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1116.785638] env[70020]: DEBUG nova.policy [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1374458c1943470eba7e774715ba1ca9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3f6d704dd464768953c41d34d34d944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1116.890396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.890591] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.891831] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec3215d-5991-46ec-90ff-52bbda7c2946 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.912496] env[70020]: DEBUG nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1116.917467] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1751ee4d-537e-4502-b9ca-b1d7c6e4325a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.943012] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Reconfiguring VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1116.946067] env[70020]: DEBUG oslo_concurrency.lockutils [req-e45cfa9c-2703-4502-9481-34ecbbda2ba4 req-fa8a47c1-29e1-4efd-88c7-841e918264ee service nova] Releasing lock "refresh_cache-f9d4837f-0e3f-4a83-9055-04d17ef3eb23" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.946427] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0d9732c-c9ed-427f-9f87-5a9180f51f7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.965753] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1116.965753] env[70020]: value = "task-3618992" [ 1116.965753] env[70020]: _type = "Task" [ 1116.965753] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.971186] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373223c2-ad24-49aa-9265-e41ec3f8a86b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.977329] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.982386] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc54a303-e265-48a0-8ab9-2202a9de906f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.015957] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afdc58f-cfa8-4671-8403-2443df2b16f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.023593] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9b132c-fc07-4b56-9faa-023a851eb792 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.040826] env[70020]: DEBUG nova.compute.provider_tree [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.059013] env[70020]: DEBUG nova.network.neutron [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Successfully created port: ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1117.117696] env[70020]: DEBUG nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1117.253061] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1117.434939] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.475852] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.543310] env[70020]: DEBUG nova.scheduler.client.report [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.636215] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.976486] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.047549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.805s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.050976] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.261s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.051258] env[70020]: DEBUG nova.objects.instance [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'resources' on Instance uuid 0453722d-258f-49e3-b61e-f1081eb465c6 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.071156] env[70020]: INFO nova.scheduler.client.report [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocations for instance f1a09304-7725-489a-8669-322a51c709e5 [ 1118.262691] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1118.288198] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1118.288457] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.288687] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1118.288789] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.288932] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1118.289088] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1118.289296] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1118.289561] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1118.289625] env[70020]: DEBUG 
nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1118.289771] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1118.289971] env[70020]: DEBUG nova.virt.hardware [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1118.290840] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fb36a2-e309-44fe-b58c-644cb8aea43e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.298867] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c86d23d-8270-4340-adde-662470e0cc5c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.410531] env[70020]: DEBUG nova.compute.manager [req-c84b259c-1b33-43a6-accc-ed9771459417 req-1b48d2d2-7233-4095-acee-d3f268d48742 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Received event network-vif-plugged-ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1118.410809] env[70020]: DEBUG oslo_concurrency.lockutils [req-c84b259c-1b33-43a6-accc-ed9771459417 req-1b48d2d2-7233-4095-acee-d3f268d48742 service nova] Acquiring lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.410958] env[70020]: DEBUG oslo_concurrency.lockutils [req-c84b259c-1b33-43a6-accc-ed9771459417 req-1b48d2d2-7233-4095-acee-d3f268d48742 service nova] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.411142] env[70020]: DEBUG oslo_concurrency.lockutils [req-c84b259c-1b33-43a6-accc-ed9771459417 req-1b48d2d2-7233-4095-acee-d3f268d48742 service nova] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.411313] env[70020]: DEBUG nova.compute.manager [req-c84b259c-1b33-43a6-accc-ed9771459417 req-1b48d2d2-7233-4095-acee-d3f268d48742 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] No waiting events found dispatching network-vif-plugged-ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1118.411480] env[70020]: WARNING nova.compute.manager 
[req-c84b259c-1b33-43a6-accc-ed9771459417 req-1b48d2d2-7233-4095-acee-d3f268d48742 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Received unexpected event network-vif-plugged-ff971a6c-7fab-4c04-a75d-259986b9fce0 for instance with vm_state building and task_state spawning. [ 1118.412615] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Volume attach. Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1118.412837] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1118.413987] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08eb0ade-9588-4509-82bd-c89912e8eef7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.430392] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343a50de-3eb9-4b00-8c80-1bb176f4ed1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.456566] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba/volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.456860] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bb844a0-10c1-49bf-b50d-72e57207400c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.480325] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.482185] env[70020]: DEBUG oslo_vmware.api [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1118.482185] env[70020]: value = "task-3618993" [ 1118.482185] env[70020]: _type = "Task" [ 1118.482185] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.490136] env[70020]: DEBUG oslo_vmware.api [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618993, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.500027] env[70020]: DEBUG nova.network.neutron [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Successfully updated port: ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1118.579791] env[70020]: DEBUG oslo_concurrency.lockutils [None req-1dfbf0a8-6d74-40ac-b24f-b88a6fa08203 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "f1a09304-7725-489a-8669-322a51c709e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.221s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.718931] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc406bba-71d2-4a06-8547-e228952dc515 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.726746] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb87230-114b-4b75-a111-37a1131cc337 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.765317] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b8d3f5-7e12-4149-897e-40beeedb1027 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.773441] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ebfad2-20a0-475a-ac5a-7ca814896621 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.787181] env[70020]: DEBUG nova.compute.provider_tree [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.980277] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.991839] env[70020]: DEBUG oslo_vmware.api [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618993, 'name': ReconfigVM_Task, 'duration_secs': 0.316691} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.994245] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfigured VM instance instance-00000063 to attach disk [datastore1] volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba/volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.996759] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6e623ed-d1be-4b28-aff8-9a49be072cca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.006633] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.006761] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.006930] env[70020]: DEBUG nova.network.neutron [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1119.012499] env[70020]: DEBUG oslo_vmware.api [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1119.012499] env[70020]: value = "task-3618994" [ 1119.012499] env[70020]: _type = "Task" [ 1119.012499] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.020996] env[70020]: DEBUG oslo_vmware.api [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618994, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.290608] env[70020]: DEBUG nova.scheduler.client.report [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.480971] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.520946] env[70020]: DEBUG oslo_vmware.api [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618994, 'name': ReconfigVM_Task, 'duration_secs': 0.133142} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.521280] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1119.539232] env[70020]: DEBUG nova.network.neutron [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1119.666705] env[70020]: DEBUG nova.network.neutron [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [{"id": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "address": "fa:16:3e:34:e6:98", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff971a6c-7f", "ovs_interfaceid": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.796016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.798960] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.950s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.799236] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.799667] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1119.799971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.824s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.801726] 
env[70020]: INFO nova.compute.claims [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1119.804761] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35c36f4-cd0c-4826-b321-7286c6081182 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.814512] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7829936-5ead-42d7-ab2d-6480c7a86e8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.820033] env[70020]: INFO nova.scheduler.client.report [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance 0453722d-258f-49e3-b61e-f1081eb465c6 [ 1119.834976] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0910ca1-5fbd-4e95-94cc-ef530d0e7900 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.841097] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8af356d-bdeb-4491-89fb-19530b175994 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.873686] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179553MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1119.873848] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.981488] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.169291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.169581] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Instance network_info: |[{"id": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "address": "fa:16:3e:34:e6:98", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff971a6c-7f", "ovs_interfaceid": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1120.170018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:e6:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff971a6c-7fab-4c04-a75d-259986b9fce0', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1120.177877] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1120.178136] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1120.178376] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92a65a60-841d-45bc-8e8d-e529c6e68667 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.200912] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1120.200912] env[70020]: value = "task-3618995" [ 1120.200912] env[70020]: _type = "Task" [ 1120.200912] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.208908] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618995, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.327131] env[70020]: DEBUG oslo_concurrency.lockutils [None req-976cd10e-5e5c-48a1-907c-a832163549cc tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "0453722d-258f-49e3-b61e-f1081eb465c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.960s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.437224] env[70020]: DEBUG nova.compute.manager [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Received event network-changed-ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1120.437381] env[70020]: DEBUG nova.compute.manager [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Refreshing instance network info cache due to event network-changed-ff971a6c-7fab-4c04-a75d-259986b9fce0. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1120.437635] env[70020]: DEBUG oslo_concurrency.lockutils [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] Acquiring lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.437782] env[70020]: DEBUG oslo_concurrency.lockutils [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] Acquired lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.437941] env[70020]: DEBUG nova.network.neutron [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Refreshing network info cache for port ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1120.485242] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.561589] env[70020]: DEBUG nova.objects.instance [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'flavor' on Instance uuid 8dbb1de0-38de-493f-9512-b8754bab7bcb {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.711113] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3618995, 'name': CreateVM_Task, 'duration_secs': 0.322584} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.711299] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1120.712102] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.712270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.712581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1120.712837] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da0c95d1-77f5-470d-90cd-67cb0a7a22c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.717433] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1120.717433] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b67d3c-9669-3a3f-7c1e-70b987258a72" [ 1120.717433] env[70020]: _type = "Task" [ 1120.717433] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.725349] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b67d3c-9669-3a3f-7c1e-70b987258a72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.931810] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.932059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.983008] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.002744] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc753ec-90a1-402b-a67e-6b97e8bc7256 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.010450] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e7e9b1-96a1-4de0-a953-006e7994fee6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.042111] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60424e8a-ca15-4113-b467-0554c8d43f79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.049583] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3a8dd4-aabe-45ea-8b3c-9c0d562bb175 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.062147] env[70020]: DEBUG nova.compute.provider_tree [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.067904] env[70020]: DEBUG oslo_concurrency.lockutils [None req-378a9140-6120-45f2-8ef7-7980b8b4bc4b tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.278s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.172368] env[70020]: DEBUG nova.network.neutron [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updated VIF entry in instance 
network info cache for port ff971a6c-7fab-4c04-a75d-259986b9fce0. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1121.172742] env[70020]: DEBUG nova.network.neutron [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [{"id": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "address": "fa:16:3e:34:e6:98", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff971a6c-7f", "ovs_interfaceid": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.229322] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b67d3c-9669-3a3f-7c1e-70b987258a72, 'name': SearchDatastore_Task, 'duration_secs': 0.010059} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.229605] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.229836] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1121.230074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.230224] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.230403] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1121.230648] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7bc45c2-54e9-4fb5-b938-5ae8cc9561dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.240807] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1121.240973] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1121.242033] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52798af8-a7da-4a7b-a1fe-dfbb20e8ddd6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.246703] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1121.246703] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b09060-02e2-2c29-ee2c-b8b42b8a83d7" [ 1121.246703] env[70020]: _type = "Task" [ 1121.246703] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.254212] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b09060-02e2-2c29-ee2c-b8b42b8a83d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.388931] env[70020]: INFO nova.compute.manager [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Rebuilding instance [ 1121.431362] env[70020]: DEBUG nova.compute.manager [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.432224] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12da7106-fbc2-4458-8e01-3926ed6c02aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.434883] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1121.486602] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.568035] env[70020]: DEBUG nova.scheduler.client.report [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.675201] env[70020]: DEBUG oslo_concurrency.lockutils [req-a2f45d32-ee0e-4c52-adba-575e163b4f3d req-51e884b8-66a7-4e31-9fb3-e4249d146fab service nova] Releasing lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.758647] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b09060-02e2-2c29-ee2c-b8b42b8a83d7, 'name': SearchDatastore_Task, 'duration_secs': 0.009489} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.759838] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b71aafc8-faf7-4e89-beee-4e6cdb242f81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.766255] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1121.766255] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e182bd-aa26-b075-ef2d-e5fe8070cff0" [ 1121.766255] env[70020]: _type = "Task" [ 1121.766255] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.773529] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e182bd-aa26-b075-ef2d-e5fe8070cff0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.956785] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.985034] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.072955] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.073536] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1122.076328] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.769s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.076570] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.078565] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.644s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.080399] env[70020]: INFO nova.compute.claims [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1122.098444] env[70020]: INFO nova.scheduler.client.report [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted allocations for instance 8adadb2e-2a20-45b1-bed8-34e09df25f39 [ 1122.277472] 
env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e182bd-aa26-b075-ef2d-e5fe8070cff0, 'name': SearchDatastore_Task, 'duration_secs': 0.009365} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.277747] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.278072] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32/ce4796b0-4ad2-4468-9898-aaedce6dcd32.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1122.278376] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-893a03b7-313f-47bb-a72b-6bf1feabf882 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.285972] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1122.285972] env[70020]: value = "task-3618996" [ 1122.285972] env[70020]: _type = "Task" [ 1122.285972] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.294203] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618996, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.447043] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.447430] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddb202bd-352d-4844-b960-2a041ee48acc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.455573] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1122.455573] env[70020]: value = "task-3618997" [ 1122.455573] env[70020]: _type = "Task" [ 1122.455573] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.465862] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618997, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.484494] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 18%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.585687] env[70020]: DEBUG nova.compute.utils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1122.590466] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1122.590713] env[70020]: DEBUG nova.network.neutron [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1122.608137] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf74d57d-baa8-4165-8045-1dd70090ebff tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "8adadb2e-2a20-45b1-bed8-34e09df25f39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.909s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.642681] env[70020]: DEBUG nova.policy [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ebabdad8aa843f28165fcd167382c60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfa7d3b1f5a14c60b19cde5030c2f0a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1122.795780] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618996, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.432906} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.796014] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32/ce4796b0-4ad2-4468-9898-aaedce6dcd32.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1122.796240] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1122.796547] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09b667c2-651d-4ecc-8bfa-726b6d2d1176 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.803023] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1122.803023] env[70020]: value = "task-3618998" [ 1122.803023] env[70020]: _type = "Task" [ 1122.803023] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.811176] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618998, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.909021] env[70020]: DEBUG nova.network.neutron [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Successfully created port: 4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1122.966914] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618997, 'name': PowerOffVM_Task, 'duration_secs': 0.397058} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.967197] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.984522] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.019921] env[70020]: INFO nova.compute.manager [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Detaching volume 1acb6a62-8f9b-4b43-be82-4306b549a1ba [ 1123.053903] env[70020]: INFO nova.virt.block_device [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Attempting to driver detach volume 1acb6a62-8f9b-4b43-be82-4306b549a1ba from mountpoint /dev/sdb [ 1123.054271] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1123.054521] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1123.055414] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782ad1ce-67ec-4ce5-b680-0f3d07aff85f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.079393] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3337001-e957-4ab2-8c2e-a38b5d8c75ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.086609] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd67050-64f7-4b4e-8781-f69ec330d8a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.090907] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1123.115034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f75156-85b7-4b2a-be1d-f180e1a914bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.131530] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] The volume has not been displaced from its original location: [datastore1] volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba/volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1123.136963] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1123.137798] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f163dac1-46ed-4866-90e8-15c5ad3cbf3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.161022] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1123.161022] env[70020]: value = "task-3618999" [ 1123.161022] env[70020]: _type = "Task" [ 1123.161022] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.168290] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618999, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.312990] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618998, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.315419] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d0eae7-b117-466e-b5b1-6c07fdc60862 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.321958] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab27aa19-23ae-454d-ac09-642250536bec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.352149] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affd8c13-8272-44d0-9008-ed2a7bc5817c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.363506] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ab2b82-0f10-45bc-bbee-fa6e47dba023 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.376860] env[70020]: DEBUG nova.compute.provider_tree [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1123.486336] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.668613] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3618999, 'name': ReconfigVM_Task, 'duration_secs': 0.210433} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.668847] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1123.673790] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e809b75a-63d9-43cd-9fc0-1a999d0ede0f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.687944] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1123.687944] env[70020]: value = "task-3619000" [ 1123.687944] env[70020]: _type = "Task" [ 1123.687944] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.697688] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619000, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.813816] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618998, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.898114] env[70020]: ERROR nova.scheduler.client.report [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [req-476a45a2-b65e-4a85-8d16-1a41f535e37e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-476a45a2-b65e-4a85-8d16-1a41f535e37e"}]} [ 1123.913376] env[70020]: DEBUG nova.scheduler.client.report [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1123.925245] env[70020]: DEBUG nova.scheduler.client.report [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1123.925501] env[70020]: DEBUG nova.compute.provider_tree [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1123.935391] env[70020]: DEBUG nova.scheduler.client.report [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1123.950977] env[70020]: DEBUG nova.scheduler.client.report [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1123.987568] env[70020]: DEBUG oslo_vmware.api [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3618992, 'name': ReconfigVM_Task, 'duration_secs': 6.787475} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.987812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.988027] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Reconfigured VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1124.096762] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "9962b718-ca31-4f09-91f3-133dd68612ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.096995] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.119558] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1124.123081] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175a2f47-7a67-44ef-b2e5-996d415949e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.131416] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d87a37-c9b2-4a1a-bd6d-7971f266eb77 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.165428] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf24e20-079e-43b3-97d4-d7fc01a217ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.171144] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1124.171449] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.172145] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1124.172407] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.172606] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1124.172761] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1124.172974] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1124.173162] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1124.173328] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1124.173554] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1124.173742] env[70020]: DEBUG nova.virt.hardware [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1124.174524] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc81c17-d097-4813-b1af-42bef4fb31c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.184062] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24334940-ef28-41fb-a9b1-eb7cf85e30c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.189463] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda29927-5c9f-4c21-8288-1cc8ed88bd77 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.206546] env[70020]: DEBUG nova.compute.provider_tree [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1124.218094] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: 
{'id': task-3619000, 'name': ReconfigVM_Task, 'duration_secs': 0.222613} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.221262] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1124.261944] env[70020]: DEBUG nova.scheduler.client.report [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1124.261944] env[70020]: DEBUG nova.compute.provider_tree [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 149 to 150 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1124.261944] env[70020]: DEBUG nova.compute.provider_tree [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1124.310725] env[70020]: DEBUG nova.compute.manager [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-vif-deleted-5380cda0-d51f-4970-a418-c89ed561db06 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1124.310927] env[70020]: INFO nova.compute.manager [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] [instance: 
c972e083-8c91-4875-a8c6-8257b06c93a1] Neutron deleted interface 5380cda0-d51f-4970-a418-c89ed561db06; detaching it from the instance and deleting it from the info cache [ 1124.311185] env[70020]: DEBUG nova.network.neutron [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.317690] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3618998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.043593} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.317894] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1124.320699] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf865a8-96a9-42ee-b123-4f4ebd48e706 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.340814] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32/ce4796b0-4ad2-4468-9898-aaedce6dcd32.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1124.341351] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2eaa7117-9e83-4df9-9a0d-756fddb054f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.360990] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1124.360990] env[70020]: value = "task-3619001" [ 1124.360990] env[70020]: _type = "Task" [ 1124.360990] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.369131] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619001, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.442864] env[70020]: DEBUG nova.network.neutron [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Successfully updated port: 4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1124.599940] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1124.764074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.764616] env[70020]: DEBUG nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1124.767450] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.131s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.768868] env[70020]: INFO nova.compute.claims [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1124.813893] env[70020]: DEBUG oslo_concurrency.lockutils [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.814084] env[70020]: DEBUG oslo_concurrency.lockutils [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] Acquired lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.815148] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a587b4-2ea7-459f-ade7-ab7825353735 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.832967] env[70020]: DEBUG oslo_concurrency.lockutils [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] Releasing lock "c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.833364] env[70020]: WARNING nova.compute.manager [req-57df8c6c-f66a-4b01-a50e-f9d4f770e54b req-d234f2ef-39e6-4247-8cfb-ede099f3a16d service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Detach interface failed, port_id=5380cda0-d51f-4970-a418-c89ed561db06, reason: No device with interface-id 5380cda0-d51f-4970-a418-c89ed561db06 exists on VM: nova.exception.NotFound: No device with interface-id 5380cda0-d51f-4970-a418-c89ed561db06 exists on VM [ 1124.871166] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619001, 'name': ReconfigVM_Task, 
'duration_secs': 0.268243} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.871440] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Reconfigured VM instance instance-0000006a to attach disk [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32/ce4796b0-4ad2-4468-9898-aaedce6dcd32.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1124.872089] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d82cd059-d25b-4ec4-8cb7-cced0b34c4df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.879292] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1124.879292] env[70020]: value = "task-3619002" [ 1124.879292] env[70020]: _type = "Task" [ 1124.879292] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.892108] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619002, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.946085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.946085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.946085] env[70020]: DEBUG nova.network.neutron [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1125.120685] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.233570] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock 
"c972e083-8c91-4875-a8c6-8257b06c93a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.233825] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.234146] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.234339] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.234513] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.236636] env[70020]: INFO nova.compute.manager [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Terminating instance [ 1125.274652] env[70020]: DEBUG nova.compute.utils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1125.278466] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.278745] env[70020]: DEBUG nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Not allocating networking since 'none' was specified. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1125.279290] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3874f099-af8d-49e1-af35-3d8f65fefe67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.287250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.287408] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.287579] env[70020]: DEBUG nova.network.neutron [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1125.288652] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1125.288652] env[70020]: value = "task-3619003" [ 1125.288652] env[70020]: _type = "Task" [ 1125.288652] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.299501] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1125.299501] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1125.299628] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1125.300327] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ed372e-6148-4039-b748-328aee2d44fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.319324] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dd5a46-992d-4feb-bcc9-3d87612298d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.325285] env[70020]: WARNING nova.virt.vmwareapi.driver [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1125.325599] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1125.326313] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d306538-e6d4-4c40-9c5b-013a75b1483e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.332533] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1125.332747] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-848ef98d-2057-4ff7-ac30-8ef2d1a0a3e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.390449] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619002, 'name': Rename_Task, 'duration_secs': 0.145048} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.390759] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1125.391014] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b14c377-ba9e-4833-8682-b6d9de9a8b45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.395786] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.395786] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.395963] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.396260] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0fa0795-8e11-4ef8-8cec-48616ff39c3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.403031] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1125.403031] env[70020]: value = "task-3619005" [ 1125.403031] env[70020]: _type = "Task" [ 1125.403031] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.406556] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1125.406556] env[70020]: value = "task-3619006" [ 1125.406556] env[70020]: _type = "Task" [ 1125.406556] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.413418] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619005, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.418013] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619006, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.481514] env[70020]: DEBUG nova.network.neutron [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1125.614325] env[70020]: DEBUG nova.network.neutron [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance_info_cache with network_info: [{"id": "4332b789-1993-4df4-8099-15089bf507db", "address": "fa:16:3e:1a:0a:5b", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4332b789-19", "ovs_interfaceid": "4332b789-1993-4df4-8099-15089bf507db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.740392] env[70020]: DEBUG nova.compute.manager [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1125.740648] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1125.742032] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737ca8e0-8203-4f31-8131-e84feca4710f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.750548] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.750548] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61cee306-936f-40de-b0ce-2c8f582b229c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.755665] env[70020]: DEBUG oslo_vmware.api [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1125.755665] env[70020]: value = "task-3619007" [ 1125.755665] env[70020]: _type = "Task" [ 1125.755665] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.763586] env[70020]: DEBUG oslo_vmware.api [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619007, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.779016] env[70020]: DEBUG nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1125.913685] env[70020]: DEBUG oslo_vmware.api [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619005, 'name': PowerOnVM_Task, 'duration_secs': 0.472197} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.916324] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1125.916475] env[70020]: INFO nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1125.916654] env[70020]: DEBUG nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1125.917954] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd28447-43d8-4d8d-9415-eb138c2dfa06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.924354] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128109} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.925022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.925823] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.925823] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1126.000224] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be6da02-3204-421b-b3f0-39a1383a03f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.007984] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2b5c34-3817-4006-8cf3-e28c69ecbec1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.041104] env[70020]: DEBUG nova.network.neutron [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [{"id": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "address": "fa:16:3e:d7:53:7a", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap179ff8c1-53", "ovs_interfaceid": "179ff8c1-53f9-4484-9dce-1fd85174d71d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.041939] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b99d360-8427-4f94-aaeb-6798cd2cd465 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.049410] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eede812b-d862-4d26-ab2e-e35fb8f8dce6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.064397] env[70020]: DEBUG nova.compute.provider_tree [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1126.117064] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.117332] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Instance network_info: |[{"id": "4332b789-1993-4df4-8099-15089bf507db", "address": "fa:16:3e:1a:0a:5b", "network": {"id": 
"83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4332b789-19", "ovs_interfaceid": "4332b789-1993-4df4-8099-15089bf507db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1126.117850] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:0a:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4332b789-1993-4df4-8099-15089bf507db', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1126.126495] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.126724] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1126.126946] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d49ee0f8-e028-4afe-8dc0-82d0d53ce230 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.147548] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1126.147548] env[70020]: value = "task-3619008" [ 1126.147548] env[70020]: _type = "Task" [ 1126.147548] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.155085] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619008, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.265466] env[70020]: DEBUG oslo_vmware.api [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619007, 'name': PowerOffVM_Task, 'duration_secs': 0.206553} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.265817] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1126.266047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1126.266339] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-374f59f6-e35d-496c-9c8f-1fb83cc411b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.326277] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1126.326596] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1126.326815] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleting the datastore file [datastore1] c972e083-8c91-4875-a8c6-8257b06c93a1 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1126.327826] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fd65182-e345-4519-96b3-499fc443d72e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.334882] env[70020]: DEBUG oslo_vmware.api [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1126.334882] env[70020]: value = "task-3619010" [ 1126.334882] env[70020]: _type = "Task" [ 1126.334882] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.343643] env[70020]: DEBUG oslo_vmware.api [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.345905] env[70020]: DEBUG nova.compute.manager [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Received event network-vif-plugged-4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.346130] env[70020]: DEBUG oslo_concurrency.lockutils [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] Acquiring lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.346407] env[70020]: DEBUG oslo_concurrency.lockutils [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] Lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.346539] env[70020]: DEBUG oslo_concurrency.lockutils [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] Lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.346716] env[70020]: DEBUG nova.compute.manager [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] No waiting events found dispatching network-vif-plugged-4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1126.346895] env[70020]: WARNING nova.compute.manager [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Received unexpected event network-vif-plugged-4332b789-1993-4df4-8099-15089bf507db for instance with vm_state building and task_state spawning. [ 1126.347081] env[70020]: DEBUG nova.compute.manager [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Received event network-changed-4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.347272] env[70020]: DEBUG nova.compute.manager [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Refreshing instance network info cache due to event network-changed-4332b789-1993-4df4-8099-15089bf507db. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1126.347500] env[70020]: DEBUG oslo_concurrency.lockutils [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] Acquiring lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.347646] env[70020]: DEBUG oslo_concurrency.lockutils [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] Acquired lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.347861] env[70020]: DEBUG nova.network.neutron [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Refreshing network info cache for port 4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1126.438465] env[70020]: INFO nova.virt.block_device [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Booting with volume 1acb6a62-8f9b-4b43-be82-4306b549a1ba at /dev/sdb [ 1126.448513] env[70020]: INFO nova.compute.manager [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Took 19.51 seconds to build instance. [ 1126.477743] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecf7154b-3c53-4098-861f-f45cbf71a9d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.487282] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca5ef96-14b3-4a12-ae53-ab1c4580f5e4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.520344] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03f05576-dfc4-459c-8499-1d324e263796 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.528232] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4c3af7-c17d-4f99-8a78-8e49599ea689 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.545359] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-c972e083-8c91-4875-a8c6-8257b06c93a1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.558237] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d512f6b-06c7-45cf-9af3-7e696bb3adc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.564976] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3789a877-f963-4871-a55d-c12357a068c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.580991] env[70020]: DEBUG nova.virt.block_device [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating existing volume attachment record: 7b15f559-ff7d-4e4b-8bd8-691b19c60855 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1126.595231] env[70020]: DEBUG nova.scheduler.client.report [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1126.595717] env[70020]: DEBUG nova.compute.provider_tree [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 150 to 151 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1126.595761] env[70020]: DEBUG nova.compute.provider_tree [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1126.658438] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619008, 'name': CreateVM_Task, 'duration_secs': 0.33414} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.658735] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1126.659366] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.659483] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.659794] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1126.660072] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46382eb2-5f34-4797-90dd-60abcf7b9393 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.665017] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1126.665017] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a83622-c042-6897-55b3-8ee5794a5b51" [ 1126.665017] env[70020]: _type = "Task" [ 1126.665017] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.673171] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a83622-c042-6897-55b3-8ee5794a5b51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.793406] env[70020]: DEBUG nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1126.819916] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1126.820187] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.820341] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1126.820522] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.820666] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1126.820812] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1126.821031] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1126.821197] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1126.821360] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af 
tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1126.821552] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1126.821773] env[70020]: DEBUG nova.virt.hardware [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1126.822660] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54df29f-31ad-4eee-96d5-52fbcd29cd43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.831909] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c987f4f0-ac6a-46aa-a2d2-682c91f514ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.852152] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1126.857741] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Creating folder: Project (c8f934091c84401b97ed346b5f6d8e3c). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1126.861057] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83932f43-937b-494a-809b-4c4727b05877 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.862741] env[70020]: DEBUG oslo_vmware.api [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161976} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.863899] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1126.863899] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1126.863899] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1126.863899] env[70020]: INFO nova.compute.manager [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1126.864214] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.864604] env[70020]: DEBUG nova.compute.manager [-] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1126.864692] env[70020]: DEBUG nova.network.neutron [-] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1126.873194] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Created folder: Project (c8f934091c84401b97ed346b5f6d8e3c) in parent group-v721521. [ 1126.873396] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Creating folder: Instances. Parent ref: group-v721807. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1126.873665] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c709e943-c550-4608-a740-f6333ebd1a95 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.882350] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Created folder: Instances in parent group-v721807. 
[ 1126.883642] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.883642] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1126.883642] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-480b7a7e-3e0a-4383-85b3-494a865ddef8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.899921] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1126.899921] env[70020]: value = "task-3619013" [ 1126.899921] env[70020]: _type = "Task" [ 1126.899921] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.909222] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619013, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.951245] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bd4e1eb0-4528-4421-8a36-d1c9bf1e3c84 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.023s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.049378] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f3930ad2-4c80-4d81-9576-5719a28ab5a2 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-c972e083-8c91-4875-a8c6-8257b06c93a1-5380cda0-d51f-4970-a418-c89ed561db06" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.662s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.102513] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.103154] env[70020]: DEBUG nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1127.108100] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.232s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.176156] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a83622-c042-6897-55b3-8ee5794a5b51, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.176511] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.176783] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1127.177038] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.177193] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.177372] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1127.177967] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-104c0d18-8cfc-465e-b13a-6e34c340bb7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.186014] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1127.186261] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1127.187290] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdee227a-56b6-420b-a4f0-c1a7995b9ebb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.193670] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1127.193670] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52102774-ac90-3f4f-f587-4370266c6fe8" [ 1127.193670] env[70020]: _type = "Task" [ 1127.193670] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.201795] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52102774-ac90-3f4f-f587-4370266c6fe8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.257396] env[70020]: DEBUG nova.network.neutron [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updated VIF entry in instance network info cache for port 4332b789-1993-4df4-8099-15089bf507db. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1127.258356] env[70020]: DEBUG nova.network.neutron [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance_info_cache with network_info: [{"id": "4332b789-1993-4df4-8099-15089bf507db", "address": "fa:16:3e:1a:0a:5b", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4332b789-19", "ovs_interfaceid": "4332b789-1993-4df4-8099-15089bf507db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.410129] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619013, 'name': CreateVM_Task, 'duration_secs': 0.352478} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.410312] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1127.411019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.411311] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.411530] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1127.411886] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1561296b-6130-4da8-ada6-f93f7471a1f6 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.416783] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1127.416783] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52758b1b-bcd8-86b2-be0a-2f0da74bbeb2" [ 1127.416783] env[70020]: _type = "Task" [ 1127.416783] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.424626] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52758b1b-bcd8-86b2-be0a-2f0da74bbeb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.611265] env[70020]: DEBUG nova.compute.utils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1127.615582] env[70020]: DEBUG nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1127.704577] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52102774-ac90-3f4f-f587-4370266c6fe8, 'name': SearchDatastore_Task, 'duration_secs': 0.009462} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.705642] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e8dfeb1-aae5-4de2-9ce3-4ceca8074ad2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.711607] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1127.711607] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529f7ff8-ffa3-3bf0-2b46-20c4113c26b6" [ 1127.711607] env[70020]: _type = "Task" [ 1127.711607] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.719323] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529f7ff8-ffa3-3bf0-2b46-20c4113c26b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.761623] env[70020]: DEBUG oslo_concurrency.lockutils [req-e12a9e21-6753-4c67-acf5-2f6c9be070ef req-6f3b642a-f2f8-4746-a7e9-bebe9ca1c74b service nova] Releasing lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.927567] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52758b1b-bcd8-86b2-be0a-2f0da74bbeb2, 'name': SearchDatastore_Task, 'duration_secs': 0.00899} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.927871] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.928117] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1127.928333] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.967271] env[70020]: DEBUG nova.network.neutron [-] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.116338] env[70020]: DEBUG nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1128.126603] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=70020) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1128.144693] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.144912] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.145158] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.145298] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.145512] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c972e083-8c91-4875-a8c6-8257b06c93a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.145691] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8dbb1de0-38de-493f-9512-b8754bab7bcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.145982] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9e7bd10b-3a78-48d8-9b66-e3646635be6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.146109] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f9d4837f-0e3f-4a83-9055-04d17ef3eb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.146284] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c0a78ace-307e-4156-beb3-a53061acff7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.146468] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 899183eb-ba25-491f-b981-77a33239ed74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.146647] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d0252c4e-0991-45b9-bf0b-b8e41093e518 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1128.225034] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529f7ff8-ffa3-3bf0-2b46-20c4113c26b6, 'name': SearchDatastore_Task, 'duration_secs': 0.01017} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.225387] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.225587] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f/c0a78ace-307e-4156-beb3-a53061acff7f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1128.225868] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.226069] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.226533] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-794f6184-dfb7-4d43-bbea-a23a8bea3619 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.229063] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c64fd9f4-e6b2-4284-8b06-6a5a8d61d816 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.238266] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1128.238266] env[70020]: value = "task-3619014" [ 1128.238266] env[70020]: _type = "Task" [ 1128.238266] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.239482] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.239650] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.244119] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e625df1-7004-431c-b044-8104c31c6252 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.260896] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.262853] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1128.262853] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b86159-bcdf-6c36-6809-2e4e05383a7b" [ 1128.262853] env[70020]: _type = "Task" [ 1128.262853] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.273192] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b86159-bcdf-6c36-6809-2e4e05383a7b, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.275124] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92e1dde1-b334-4b49-a879-a5fac1cd45f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.281125] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1128.281125] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52549cae-2c25-75ed-4f22-69f9999cc18d" [ 1128.281125] env[70020]: _type = "Task" [ 1128.281125] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.290368] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52549cae-2c25-75ed-4f22-69f9999cc18d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.372846] env[70020]: DEBUG nova.compute.manager [req-777c96d7-03d7-4e16-9ec7-5dfb1ffcb22a req-2ed2f42a-ace3-40c5-9a97-85a4458329c6 service nova] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Received event network-vif-deleted-179ff8c1-53f9-4484-9dce-1fd85174d71d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1128.469820] env[70020]: INFO nova.compute.manager [-] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Took 1.60 seconds to deallocate network for instance. [ 1128.650382] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1e9d39d5-40fd-40b7-9421-94e0bff0314e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1128.719415] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1128.719810] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.719924] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1128.720110] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.720260] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1128.720408] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1128.720614] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1128.720767] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1128.720929] 
env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1128.721103] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1128.721278] env[70020]: DEBUG nova.virt.hardware [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1128.722433] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b72d3bb-f7fb-4c4a-b2bc-a088b1c3594a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.731091] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574816db-f12f-496c-8528-7a136ce5e9f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.745195] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:89:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e1b8b9c-b1c2-448e-8d9c-621c1810194a', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1128.753206] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1128.756635] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1128.758331] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abe96258-9328-483e-a8b1-2a35aba0543b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.776985] env[70020]: DEBUG nova.compute.manager [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1128.783405] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434352} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.786538] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f/c0a78ace-307e-4156-beb3-a53061acff7f.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1128.786538] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1128.787702] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-022e32fe-9145-4bf6-91d8-f27dcc1d2445 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.789757] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1128.789757] env[70020]: value = "task-3619015" [ 1128.789757] env[70020]: _type = "Task" [ 1128.789757] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.798247] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52549cae-2c25-75ed-4f22-69f9999cc18d, 'name': SearchDatastore_Task, 'duration_secs': 0.008479} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.798528] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1128.798528] env[70020]: value = "task-3619016" [ 1128.798528] env[70020]: _type = "Task" [ 1128.798528] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.799149] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.799429] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 899183eb-ba25-491f-b981-77a33239ed74/899183eb-ba25-491f-b981-77a33239ed74.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1128.799692] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6baaf9b-943e-4752-9e4f-6385494d6b6d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.807722] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619015, 'name': CreateVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.825474] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619016, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.827015] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1128.827015] env[70020]: value = "task-3619017" [ 1128.827015] env[70020]: _type = "Task" [ 1128.827015] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.836544] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619017, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.977451] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.128763] env[70020]: DEBUG nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1129.153493] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1129.153752] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.153909] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1129.154104] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.154303] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1129.154541] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1129.154717] env[70020]: DEBUG 
nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1129.154879] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1129.155122] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1129.155325] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1129.155521] env[70020]: DEBUG nova.virt.hardware [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1129.156363] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9962b718-ca31-4f09-91f3-133dd68612ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1129.156530] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration 4939402d-e13e-49ea-912f-3c8637ee0898 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1129.156657] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ce4796b0-4ad2-4468-9898-aaedce6dcd32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1129.156936] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1129.157091] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1129.160295] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba770fb-d8be-4df2-869e-9d25522c7cb6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.170588] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0cb629-117e-499c-b7a4-456ad6d31f9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.187247] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.193452] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1129.196760] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1129.197317] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92f6c618-126e-465c-8fa0-03e06ede9eec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.218951] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.218951] env[70020]: value = "task-3619018" [ 1129.218951] env[70020]: _type = "Task" [ 1129.218951] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.231991] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619018, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.301165] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619015, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.302621] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.316868] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619016, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065388} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.317351] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1129.318493] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb2567c-9452-46d8-b97f-6bc0851e4127 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.354205] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f/c0a78ace-307e-4156-beb3-a53061acff7f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.357945] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06e0bf91-4af8-4ce7-a74d-092130b43314 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.376772] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619017, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.385092] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1129.385092] env[70020]: value = "task-3619019" [ 1129.385092] env[70020]: _type = "Task" [ 1129.385092] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.395319] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619019, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.435610] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a8e293-d1fd-42f4-a41b-a7dec90c358b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.443719] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3deb4c82-871c-4827-a5cb-85049657f5f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.474882] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f18584-1f91-4309-947b-4410a241b21b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.482684] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5e60f8-c21d-4aee-94c7-ad846094d917 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.495930] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.728808] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619018, 'name': CreateVM_Task, 'duration_secs': 0.37065} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.729470] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1129.730075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.730932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.730932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1129.730932] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d63f0005-4abe-410a-8260-5bca06de1d54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.735051] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 
tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1129.735051] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5251122f-eab8-0efb-0596-655cf6d4f7f6" [ 1129.735051] env[70020]: _type = "Task" [ 1129.735051] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.742168] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5251122f-eab8-0efb-0596-655cf6d4f7f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.798374] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619015, 'name': CreateVM_Task, 'duration_secs': 0.561858} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.798563] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1129.799194] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.837860] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619017, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554395} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.838101] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 899183eb-ba25-491f-b981-77a33239ed74/899183eb-ba25-491f-b981-77a33239ed74.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1129.838307] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1129.838531] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0dcc3ea7-1f95-4fa0-965c-35ba101c0879 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.844605] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1129.844605] env[70020]: value = "task-3619020" [ 1129.844605] env[70020]: _type = "Task" [ 1129.844605] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.852454] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619020, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.893776] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619019, 'name': ReconfigVM_Task, 'duration_secs': 0.334309} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.894056] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f/c0a78ace-307e-4156-beb3-a53061acff7f.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.894681] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-703e8d31-9cb6-415b-aefb-86ec12afce22 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.901048] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1129.901048] env[70020]: value = "task-3619021" [ 1129.901048] env[70020]: _type = "Task" [ 1129.901048] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.908678] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619021, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.999878] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.245964] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5251122f-eab8-0efb-0596-655cf6d4f7f6, 'name': SearchDatastore_Task, 'duration_secs': 0.007812} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.246207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.246470] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1130.246713] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.246857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.247044] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1130.247325] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.247626] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1130.247859] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0aaab1a-6c4c-48c8-b400-81d2d8a2e929 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.249609] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75b2f4c9-818e-407d-b906-53cf447fc887 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.254776] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 
tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1130.254776] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a2384f-387a-95e3-8a6d-95b33e3b2250" [ 1130.254776] env[70020]: _type = "Task" [ 1130.254776] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.258875] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1130.259108] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1130.262229] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1099c2b0-3a0a-4f2b-bd20-65372774301c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.264224] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a2384f-387a-95e3-8a6d-95b33e3b2250, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.267214] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1130.267214] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a64d1-8e9a-0a73-b95f-a7efe74f0770" [ 1130.267214] env[70020]: _type = "Task" [ 1130.267214] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.273652] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523a64d1-8e9a-0a73-b95f-a7efe74f0770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.353650] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06486} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.353917] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1130.354655] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da454042-cb37-4a68-a562-e7ea667d8c81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.373206] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 899183eb-ba25-491f-b981-77a33239ed74/899183eb-ba25-491f-b981-77a33239ed74.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.373400] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-745f6bf9-d8bb-4b7c-a71b-e86c90cbf0ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.391700] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1130.391700] env[70020]: value = "task-3619022" [ 1130.391700] env[70020]: _type = "Task" [ 1130.391700] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.399033] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619022, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.409382] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619021, 'name': Rename_Task, 'duration_secs': 0.143052} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.409632] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1130.409877] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77d668a0-f1b6-41cf-916e-8ab2a04408a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.415453] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1130.415453] env[70020]: value = "task-3619023" [ 1130.415453] env[70020]: _type = "Task" [ 1130.415453] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.426984] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619023, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.505545] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1130.505753] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.400s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.506046] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.549s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.507640] env[70020]: INFO nova.compute.claims [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.765650] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a2384f-387a-95e3-8a6d-95b33e3b2250, 'name': SearchDatastore_Task, 'duration_secs': 0.009708} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.765998] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.766115] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1130.766330] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.776822] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523a64d1-8e9a-0a73-b95f-a7efe74f0770, 'name': SearchDatastore_Task, 'duration_secs': 0.007403} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.777559] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f726d17a-0aa6-434e-bd59-b127e6df275e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.782222] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1130.782222] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b393fc-c8a0-4b45-ce0d-c66a8f69c3c7" [ 1130.782222] env[70020]: _type = "Task" [ 1130.782222] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.789374] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b393fc-c8a0-4b45-ce0d-c66a8f69c3c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.901478] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619022, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.924624] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619023, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.292394] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b393fc-c8a0-4b45-ce0d-c66a8f69c3c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008566} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.292649] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.292905] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1131.293237] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.293455] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1131.293665] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5220ac7-8d45-45d5-83a8-c3d3efbdc02f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.295763] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e25a79a8-dee1-4fc8-9eb5-0e207397e453 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.302783] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1131.302783] env[70020]: value = "task-3619024" [ 1131.302783] env[70020]: _type = "Task" [ 1131.302783] env[70020]: } to 
complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.303829] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1131.304009] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1131.307362] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6d855aa-e865-4a0a-a87d-ae1ea8f8eed7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.314840] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1131.314840] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5248eab5-a6e4-8565-9841-d6b5aa70434d" [ 1131.314840] env[70020]: _type = "Task" [ 1131.314840] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.315048] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.322258] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5248eab5-a6e4-8565-9841-d6b5aa70434d, 'name': SearchDatastore_Task, 'duration_secs': 0.007749} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.322946] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64bc09a-b1fa-436b-91ef-dc4ea2e84e76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.327388] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1131.327388] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e10a14-3365-685e-42c7-ac7b2ba4642b" [ 1131.327388] env[70020]: _type = "Task" [ 1131.327388] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.334470] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e10a14-3365-685e-42c7-ac7b2ba4642b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.401466] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619022, 'name': ReconfigVM_Task, 'duration_secs': 0.881704} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.401740] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 899183eb-ba25-491f-b981-77a33239ed74/899183eb-ba25-491f-b981-77a33239ed74.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.402344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80bdd650-bf1c-42b4-b8ff-402844a8cb9b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.407857] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1131.407857] env[70020]: value = "task-3619025" [ 1131.407857] env[70020]: _type = "Task" [ 1131.407857] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.419667] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619025, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.426648] env[70020]: DEBUG oslo_vmware.api [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619023, 'name': PowerOnVM_Task, 'duration_secs': 0.564236} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.426894] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1131.427114] env[70020]: INFO nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Took 7.31 seconds to spawn the instance on the hypervisor. [ 1131.427294] env[70020]: DEBUG nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1131.428098] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d980803-4c1e-4a44-838d-0560dc2736e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.771050] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ea2782-9848-4260-bc0e-b2d4f37aeb8d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.779773] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9e7a70-d238-40c1-831a-0c098c31190a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.813539] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09d1a7d-3c01-4a9e-9ef4-7b9c800a953f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.820575] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441514} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.823112] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1131.823338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1131.823615] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a39429f3-bfc8-4ef8-880c-f8e7588e2672 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.826194] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f912ea2-5e83-4722-ac1f-c1c7c5bebc54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.835583] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1131.835583] env[70020]: value = "task-3619026" [ 1131.835583] env[70020]: _type = "Task" [ 1131.835583] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.857334] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1131.859413] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e10a14-3365-685e-42c7-ac7b2ba4642b, 'name': SearchDatastore_Task, 'duration_secs': 0.007593} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.863030] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.863030] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1131.863030] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bba23be9-9e82-4680-8773-062c83b7e432 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.868691] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.874389] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1131.874389] env[70020]: value = "task-3619027" [ 1131.874389] env[70020]: _type = "Task" [ 1131.874389] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.885265] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.916091] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619025, 'name': Rename_Task, 'duration_secs': 0.176241} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.916357] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.916601] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1191c64c-3431-41d6-9b6a-e549e223520a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.921476] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1131.921476] env[70020]: value = "task-3619028" [ 1131.921476] env[70020]: _type = "Task" [ 1131.921476] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.928448] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619028, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.945984] env[70020]: INFO nova.compute.manager [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Took 20.98 seconds to build instance. [ 1132.345841] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063285} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.346141] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1132.346951] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0e2429-79bb-4133-9414-80f5676fe282 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.368453] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1132.371366] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d516eebc-edda-48f9-9030-10e77adff2ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.395777] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619027, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.395777] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1132.395777] env[70020]: value = "task-3619029" [ 1132.395777] env[70020]: _type = "Task" [ 1132.395777] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.402686] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619029, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.403483] env[70020]: ERROR nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [req-fe96cb04-4f3b-43f9-aed3-05d8e533f297] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fe96cb04-4f3b-43f9-aed3-05d8e533f297"}]} [ 1132.419854] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1132.432103] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619028, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.433555] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1132.433742] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1132.444522] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1132.447789] env[70020]: DEBUG oslo_concurrency.lockutils [None req-6b338387-68f8-4112-9a07-76d8c982e748 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.491s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.461892] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1132.666041] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b3a23d-6451-42c0-b9d6-ea96b00fbfce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.674809] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95b4a9b-4757-4d7a-a3ab-f6da9a016a41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.709108] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e5a5f0-4679-440c-bf93-eda7ddab2f23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.718671] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc244ab-9d7e-4558-a171-6b92e42bba94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.735221] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1132.898225] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.970778} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.901173] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1132.901400] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1132.901646] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9834f0b5-65c4-436f-a084-3e4452c1684f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.908533] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619029, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.909790] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1132.909790] env[70020]: value = "task-3619030" [ 1132.909790] env[70020]: _type = "Task" [ 1132.909790] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.917692] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619030, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.930189] env[70020]: DEBUG oslo_vmware.api [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619028, 'name': PowerOnVM_Task, 'duration_secs': 0.777035} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.930422] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.930617] env[70020]: INFO nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Took 6.14 seconds to spawn the instance on the hypervisor. 
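The records above trace the usual VMware spawn path for these instances: copy the cached image VMDK, extend the root disk, ReconfigVM to attach it, Rename, then PowerOnVM, with every vCenter task polled through oslo.vmware's wait_for_task (the repeated "progress is N%" / "completed successfully" lines). As a rough illustration of that polling pattern only — the host, credentials, and intervals below are placeholder assumptions, not values from this environment — a session-based sketch might look like:

```python
# Illustrative sketch: poll a vCenter task the way the wait_for_task /
# _poll_task records above do. Host, credentials, and intervals are
# placeholders, not taken from this deployment.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',            # assumed vCenter host
    'administrator@vsphere.local',    # assumed username
    'password',                       # assumed password
    api_retry_count=10,
    task_poll_interval=0.5)           # seconds between task.info polls

def power_on(vm_ref):
    # Start the task; vCenter returns a Task moref (e.g. "task-3619023").
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    try:
        # Block until the task succeeds, re-reading task.info on each poll
        # interval -- the source of the "progress is 0%/66%/100%" lines.
        session.wait_for_task(task)
    except vmware_exc.VimFaultException:
        # A failed task surfaces here with the fault from task.info.error.
        raise
```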
[ 1132.930794] env[70020]: DEBUG nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1132.931550] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022f02ca-444e-4166-816e-7915596b2e71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.255943] env[70020]: ERROR nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [req-526da8e9-8817-4375-9ba5-190b3ad27c54] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-526da8e9-8817-4375-9ba5-190b3ad27c54"}]} [ 1133.273131] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1133.287272] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1133.287515] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1133.298135] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af 
tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1133.316102] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1133.408741] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619029, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.419089] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.29673} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.419420] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1133.420257] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a3c29d-d9f5-4470-b217-623bac692bec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.443737] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1133.450081] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18fe0ea3-5a72-4643-93d2-d2adc6d99c70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.467561] env[70020]: INFO nova.compute.manager [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Took 16.05 seconds to build instance. 
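The ERROR records from nova.scheduler.client.report above show Placement rejecting an inventory update with HTTP 409 (code placement.concurrent_update) because the resource provider generation changed between read and write; the report client then refreshes inventories, aggregates, and traits and retries with the new generation. As a hedged illustration of that optimistic-concurrency pattern — the endpoint, token, and retry budget below are assumptions for a standalone example, not the report client's actual code — a minimal retry loop against the Placement HTTP API could look like:

```python
# Minimal sketch of retry-on-generation-conflict against the Placement API.
# The endpoint, token, and retry budget are illustrative assumptions.
import requests

PLACEMENT = 'http://placement.example.org/placement'   # hypothetical endpoint
HEADERS = {
    'X-Auth-Token': '<token>',                          # hypothetical token
    'OpenStack-API-Version': 'placement 1.26',
}

def set_inventory(rp_uuid, inventories, attempts=3):
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(attempts):
        # Read the provider's current generation ...
        gen = requests.get(url, headers=HEADERS).json()[
            'resource_provider_generation']
        # ... and write the new inventory against that generation.
        resp = requests.put(url, headers=HEADERS, json={
            'resource_provider_generation': gen,
            'inventories': inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation (as in req-fe96cb04-... above); refresh and retry.
    raise RuntimeError(f'generation conflict persisted for {rp_uuid}')
```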
[ 1133.473784] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1133.473784] env[70020]: value = "task-3619031" [ 1133.473784] env[70020]: _type = "Task" [ 1133.473784] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.485998] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619031, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.552344] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e1fb80-5c77-4ff8-8238-eb157c854d6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.560566] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d022f16-8da9-404f-82af-f6247ac142d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.592965] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664c5b2e-f0be-4604-b976-eef52a810f90 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.600790] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7982fff-599b-42fd-a0c1-01a276b6e025 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.615205] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1133.695891] env[70020]: DEBUG nova.compute.manager [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1133.908107] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619029, 'name': ReconfigVM_Task, 'duration_secs': 1.175721} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.908502] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Reconfigured VM instance instance-0000006d to attach disk [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.909013] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d605850-edfc-4e12-85a3-ecebff934a1b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.915143] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1133.915143] env[70020]: value = "task-3619032" [ 1133.915143] env[70020]: _type = "Task" [ 1133.915143] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.922282] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619032, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.970280] env[70020]: DEBUG oslo_concurrency.lockutils [None req-eb2e5d00-a394-436c-a753-5ce54704e1af tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "899183eb-ba25-491f-b981-77a33239ed74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.570s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.986262] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619031, 'name': ReconfigVM_Task, 'duration_secs': 0.370237} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.986662] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb/8dbb1de0-38de-493f-9512-b8754bab7bcb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.988899] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'device_name': '/dev/sda', 'boot_index': 0, 'disk_bus': None, 'encryption_format': None, 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_options': None, 'size': 0, 'device_type': 'disk', 'image_id': 'c9cd83bf-fd12-4173-a067-f57d38f23556'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '7b15f559-ff7d-4e4b-8bd8-691b19c60855', 'guest_format': None, 'delete_on_termination': False, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'}, 'mount_device': '/dev/sdb', 'boot_index': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=70020) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1133.989188] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1133.989450] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1133.990549] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8fce1b-b004-4c2d-aebc-3479eb44ac0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.014162] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe32339-94a9-492b-9f94-62e8426dc3ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.048368] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba/volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.048655] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-885e03b2-ae81-47ac-afcf-dddd090e877d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.065895] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1134.065895] env[70020]: value = "task-3619033" [ 1134.065895] env[70020]: _type = "Task" [ 1134.065895] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.073315] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619033, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.154444] env[70020]: DEBUG nova.scheduler.client.report [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 155 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1134.154703] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 155 to 156 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1134.154880] env[70020]: DEBUG nova.compute.provider_tree [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1134.213932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.425568] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619032, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.575218] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619033, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.660358] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.154s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.660897] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1134.663525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.543s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.665053] env[70020]: INFO nova.compute.claims [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.928495] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619032, 'name': Rename_Task, 'duration_secs': 0.554451} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.928898] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1134.929145] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5102bce0-c198-4b44-a5fa-874a744f0b44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.936741] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1134.936741] env[70020]: value = "task-3619034" [ 1134.936741] env[70020]: _type = "Task" [ 1134.936741] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.946689] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619034, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.075940] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619033, 'name': ReconfigVM_Task, 'duration_secs': 0.989818} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.076236] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfigured VM instance instance-00000063 to attach disk [datastore1] volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba/volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.081353] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46a345e0-7d79-47a0-b545-e12d2675ad96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.096325] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1135.096325] env[70020]: value = "task-3619035" [ 1135.096325] env[70020]: _type = "Task" [ 1135.096325] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.107309] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619035, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.170769] env[70020]: DEBUG nova.compute.utils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.177303] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1135.177642] env[70020]: DEBUG nova.network.neutron [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1135.217505] env[70020]: DEBUG nova.policy [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1135.449978] env[70020]: DEBUG oslo_vmware.api [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619034, 'name': PowerOnVM_Task, 'duration_secs': 0.48247} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.453393] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1135.453655] env[70020]: INFO nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Took 6.32 seconds to spawn the instance on the hypervisor. [ 1135.453850] env[70020]: DEBUG nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1135.454723] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d96f506-eb8e-45d8-942c-ba640cd07492 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.499387] env[70020]: DEBUG nova.network.neutron [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Successfully created port: 7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1135.606176] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619035, 'name': ReconfigVM_Task, 'duration_secs': 0.149316} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.606505] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1135.607093] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f23d2942-7b55-4f52-8757-b53a707c4cc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.613749] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1135.613749] env[70020]: value = "task-3619036" [ 1135.613749] env[70020]: _type = "Task" [ 1135.613749] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.621594] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619036, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.678583] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1135.921993] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a044bb-2b3b-43e3-a108-10d35788e58e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.929957] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aaf189a-1d9f-4a5d-92e4-13fe135f9d44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.961740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac20541-826a-4903-9983-db9dbeb40ef0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.972635] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2b06c9-9002-4562-bc0f-8f9a5445be48 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.978428] env[70020]: INFO nova.compute.manager [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Took 18.36 seconds to build instance. [ 1135.988781] env[70020]: DEBUG nova.compute.provider_tree [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.123376] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619036, 'name': Rename_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.348440] env[70020]: INFO nova.compute.manager [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Rebuilding instance [ 1136.382914] env[70020]: DEBUG nova.compute.manager [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.383809] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2868291f-9e90-4064-85c3-fba154668ec6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.480959] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de4a0e68-53b4-4ba2-ab5f-6735a7e7a7c6 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "d0252c4e-0991-45b9-bf0b-b8e41093e518" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.865s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.491550] env[70020]: DEBUG nova.scheduler.client.report [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.623997] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619036, 'name': Rename_Task, 'duration_secs': 0.663401} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.624298] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.624542] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c1ca912-65ad-4c98-9f0b-f68d8271ec1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.631028] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1136.631028] env[70020]: value = "task-3619037" [ 1136.631028] env[70020]: _type = "Task" [ 1136.631028] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.638100] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619037, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.694058] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1136.718287] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1136.719119] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.719119] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1136.719259] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.719499] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1136.719809] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1136.720138] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1136.720432] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1136.720739] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1136.721062] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1136.721379] env[70020]: DEBUG nova.virt.hardware [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1136.722519] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32e6d4e-dcc1-4859-92ea-378391b77b72 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.730619] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e98508b-c7fb-4b21-aa96-37b8ff72da91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.890662] env[70020]: DEBUG nova.compute.manager [req-b3f0d335-65fc-4378-b7a3-38c5eb1573d3 req-8d263fa2-46c2-423a-9d78-f21766c24019 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Received event network-vif-plugged-7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.890901] env[70020]: DEBUG oslo_concurrency.lockutils [req-b3f0d335-65fc-4378-b7a3-38c5eb1573d3 req-8d263fa2-46c2-423a-9d78-f21766c24019 service nova] Acquiring lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.891160] env[70020]: DEBUG oslo_concurrency.lockutils [req-b3f0d335-65fc-4378-b7a3-38c5eb1573d3 req-8d263fa2-46c2-423a-9d78-f21766c24019 service nova] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.891334] env[70020]: DEBUG oslo_concurrency.lockutils [req-b3f0d335-65fc-4378-b7a3-38c5eb1573d3 req-8d263fa2-46c2-423a-9d78-f21766c24019 service nova] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.891501] env[70020]: DEBUG nova.compute.manager [req-b3f0d335-65fc-4378-b7a3-38c5eb1573d3 req-8d263fa2-46c2-423a-9d78-f21766c24019 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] No waiting events found dispatching network-vif-plugged-7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1136.891662] env[70020]: WARNING nova.compute.manager [req-b3f0d335-65fc-4378-b7a3-38c5eb1573d3 req-8d263fa2-46c2-423a-9d78-f21766c24019 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] 
Received unexpected event network-vif-plugged-7716680a-01d2-46d9-8812-272c03afab45 for instance with vm_state building and task_state spawning. [ 1136.983381] env[70020]: DEBUG nova.network.neutron [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Successfully updated port: 7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1136.996820] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.997401] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.001025] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.024s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.001129] env[70020]: DEBUG nova.objects.instance [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'resources' on Instance uuid c972e083-8c91-4875-a8c6-8257b06c93a1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.140979] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619037, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.398589] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.398914] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0476afd3-8b8a-4f5e-8d3c-92f102d1f5db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.406405] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1137.406405] env[70020]: value = "task-3619038" [ 1137.406405] env[70020]: _type = "Task" [ 1137.406405] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.414318] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619038, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.486306] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-1e9d39d5-40fd-40b7-9421-94e0bff0314e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.486505] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-1e9d39d5-40fd-40b7-9421-94e0bff0314e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.486663] env[70020]: DEBUG nova.network.neutron [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.502534] env[70020]: DEBUG nova.compute.utils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1137.504244] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1137.504418] env[70020]: DEBUG nova.network.neutron [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.544398] env[70020]: DEBUG nova.policy [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b543e081f574f1f85874775a734a0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e3eae740ef84ef88aef113ed4d6e57b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1137.650950] env[70020]: DEBUG oslo_vmware.api [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619037, 'name': PowerOnVM_Task, 'duration_secs': 0.527089} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.651402] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.651744] env[70020]: DEBUG nova.compute.manager [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.656236] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358b7e2d-6013-44b6-84c5-7414cc2bc52e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.736298] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d921e36c-3ac5-4641-b9ee-0e6f9ed53c2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.744479] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95e44d8-7b26-4fa6-9592-13483f189bed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.778018] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9859ea5-d6d2-4ab2-b2dc-c0b77f707f00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.786348] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c93bf8-c386-4f19-bc74-b5004e341528 {{(pid=70020) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.799338] env[70020]: DEBUG nova.compute.provider_tree [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.831028] env[70020]: DEBUG nova.network.neutron [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Successfully created port: 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1137.917979] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619038, 'name': PowerOffVM_Task, 'duration_secs': 0.295632} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.917979] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.918148] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.919209] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affc942f-12ec-45a4-891b-ec64fc22b7d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.926189] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1137.926424] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1a46fa8-da9c-4cc8-b175-7f73b61b053d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.950772] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1137.950999] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1137.951199] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleting the datastore file [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1137.951459] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07ffa36b-efe2-42a9-b197-4b919bdcdf72 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.958046] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1137.958046] env[70020]: value = "task-3619040" [ 1137.958046] env[70020]: _type = "Task" [ 1137.958046] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.965277] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619040, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.005381] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.030282] env[70020]: DEBUG nova.network.neutron [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1138.181251] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.209031] env[70020]: DEBUG nova.network.neutron [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Updating instance_info_cache with network_info: [{"id": "7716680a-01d2-46d9-8812-272c03afab45", "address": "fa:16:3e:6c:09:81", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7716680a-01", "ovs_interfaceid": "7716680a-01d2-46d9-8812-272c03afab45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.302527] env[70020]: DEBUG nova.scheduler.client.report [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.467806] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377682} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.468102] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.468288] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.468460] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.711856] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-1e9d39d5-40fd-40b7-9421-94e0bff0314e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.712241] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Instance network_info: |[{"id": "7716680a-01d2-46d9-8812-272c03afab45", "address": "fa:16:3e:6c:09:81", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7716680a-01", "ovs_interfaceid": "7716680a-01d2-46d9-8812-272c03afab45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1138.712689] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:09:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7716680a-01d2-46d9-8812-272c03afab45', 
'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.720294] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.720521] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.720752] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21c6ba05-5490-457d-b0f9-c3b5edde83f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.740644] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.740644] env[70020]: value = "task-3619041" [ 1138.740644] env[70020]: _type = "Task" [ 1138.740644] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.748630] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619041, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.808625] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.811976] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.508s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.839379] env[70020]: INFO nova.scheduler.client.report [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted allocations for instance c972e083-8c91-4875-a8c6-8257b06c93a1 [ 1139.016039] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.036570] env[70020]: DEBUG nova.compute.manager [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Received event network-changed-7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1139.036700] env[70020]: DEBUG nova.compute.manager [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Refreshing instance network info cache due to event network-changed-7716680a-01d2-46d9-8812-272c03afab45. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1139.036916] env[70020]: DEBUG oslo_concurrency.lockutils [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] Acquiring lock "refresh_cache-1e9d39d5-40fd-40b7-9421-94e0bff0314e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.037076] env[70020]: DEBUG oslo_concurrency.lockutils [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] Acquired lock "refresh_cache-1e9d39d5-40fd-40b7-9421-94e0bff0314e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.037242] env[70020]: DEBUG nova.network.neutron [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Refreshing network info cache for port 7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1139.049233] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.049569] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.049731] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.050149] 
env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.050361] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.050648] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.050974] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.051249] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.051471] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.051585] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.051758] env[70020]: DEBUG nova.virt.hardware [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.053173] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956d5325-71b0-47df-8327-7fd1ff1889ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.063094] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d8a638-5738-4f04-a830-abffff88e492 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.255424] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619041, 'name': CreateVM_Task, 'duration_secs': 0.420729} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.255676] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.256565] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.256879] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.257392] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.257662] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3558459-a480-4cfd-93e0-8e307c286a8b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.263552] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1139.263552] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5225f8f3-7d2f-f3a9-25a9-322c1366225f" [ 1139.263552] env[70020]: _type = "Task" [ 1139.263552] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.274046] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5225f8f3-7d2f-f3a9-25a9-322c1366225f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.315793] env[70020]: INFO nova.compute.claims [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1139.347736] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d303fe78-1c34-475b-8067-bce2e43dfcc0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "c972e083-8c91-4875-a8c6-8257b06c93a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.114s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.501616] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.501861] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.502026] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.502214] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.502359] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.502506] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.502705] env[70020]: DEBUG 
nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.502862] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.503037] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.503201] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.503371] env[70020]: DEBUG nova.virt.hardware [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.504285] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f47071a-90b6-4262-9669-b775cb8931b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.513348] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ded9d3b-bbac-4623-9ca2-b08ed0536333 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.527955] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1139.533603] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1139.533866] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1139.534092] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3bab68e-3942-429f-8fae-16169541c0a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.553269] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1139.553269] env[70020]: value = "task-3619042" [ 1139.553269] env[70020]: _type = "Task" [ 1139.553269] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.564489] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619042, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.774799] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5225f8f3-7d2f-f3a9-25a9-322c1366225f, 'name': SearchDatastore_Task, 'duration_secs': 0.009969} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.775140] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.775383] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.775689] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.775836] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.776028] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.776290] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f05d11f-5f7a-4626-9799-795c7c4f6f00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.787016] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.787228] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1139.787960] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66c6a8c0-77d3-4bce-9b29-ec6bf493324e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.793193] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1139.793193] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ca1b54-e6c3-2022-f1d6-658ea48a53a3" [ 1139.793193] env[70020]: _type = "Task" [ 1139.793193] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.802474] env[70020]: DEBUG nova.network.neutron [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Successfully updated port: 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1139.807360] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ca1b54-e6c3-2022-f1d6-658ea48a53a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.821703] env[70020]: INFO nova.compute.resource_tracker [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating resource usage from migration 4939402d-e13e-49ea-912f-3c8637ee0898 [ 1139.988474] env[70020]: DEBUG nova.network.neutron [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Updated VIF entry in instance network info cache for port 7716680a-01d2-46d9-8812-272c03afab45. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1139.988840] env[70020]: DEBUG nova.network.neutron [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Updating instance_info_cache with network_info: [{"id": "7716680a-01d2-46d9-8812-272c03afab45", "address": "fa:16:3e:6c:09:81", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7716680a-01", "ovs_interfaceid": "7716680a-01d2-46d9-8812-272c03afab45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.041683] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53dab89-5717-496e-b804-3337a2fb4ee2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.049378] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1eaff6-9327-49ae-923b-615440e8b79b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.084507] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22890682-5010-4cf7-ae19-6c8763a6aee5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.090042] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619042, 'name': CreateVM_Task, 'duration_secs': 0.30174} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.090668] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1140.090950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.091123] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.091432] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1140.091696] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-304f3284-c741-454b-89d6-99e9e510eebd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.096565] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c206933-4f15-4304-a690-9168498bf87a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.101169] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1140.101169] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524105fa-a4f0-a5bc-b01b-5059e808a0d7" [ 1140.101169] env[70020]: _type = "Task" [ 1140.101169] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.112292] env[70020]: DEBUG nova.compute.provider_tree [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.118528] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524105fa-a4f0-a5bc-b01b-5059e808a0d7, 'name': SearchDatastore_Task, 'duration_secs': 0.014033} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.118789] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.119022] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.119255] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.119397] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.119569] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.119936] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-854a319e-26bf-47ad-9303-141b01ae4a68 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.128743] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.128924] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.129642] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58ec5cfd-365d-45c5-a657-36cc66913c01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.137657] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1140.137657] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c2d1f0-909a-4bc2-4acb-ef9fff6394c4" [ 1140.137657] env[70020]: _type = "Task" [ 1140.137657] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.142394] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2d1f0-909a-4bc2-4acb-ef9fff6394c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.304449] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ca1b54-e6c3-2022-f1d6-658ea48a53a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010353} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.306286] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5584111-a454-40a0-b001-9750de7501cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.309063] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.309216] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.309369] env[70020]: DEBUG nova.network.neutron [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.315405] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1140.315405] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ac238f-c4f3-044c-7f64-c8a6fd10ed05" [ 1140.315405] env[70020]: _type = "Task" [ 1140.315405] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.325169] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ac238f-c4f3-044c-7f64-c8a6fd10ed05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.410019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.410268] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.492895] env[70020]: DEBUG oslo_concurrency.lockutils [req-408eb453-852e-4ad6-a890-87b28b8ce580 req-ff14303b-ee2d-4bf9-b69e-41b2b187567a service nova] Releasing lock "refresh_cache-1e9d39d5-40fd-40b7-9421-94e0bff0314e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.645087] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2d1f0-909a-4bc2-4acb-ef9fff6394c4, 'name': SearchDatastore_Task, 'duration_secs': 0.033811} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.646151] env[70020]: DEBUG nova.scheduler.client.report [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 156 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1140.646371] env[70020]: DEBUG nova.compute.provider_tree [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 156 to 157 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1140.646556] env[70020]: DEBUG nova.compute.provider_tree [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.649688] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-790d1e45-346c-4c06-8ace-580903e5abf6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.655015] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1140.655015] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f9e4a3-1de2-3efa-6c1f-3bf6d7571c1c" [ 1140.655015] env[70020]: _type = "Task" [ 1140.655015] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.663994] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f9e4a3-1de2-3efa-6c1f-3bf6d7571c1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.825461] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ac238f-c4f3-044c-7f64-c8a6fd10ed05, 'name': SearchDatastore_Task, 'duration_secs': 0.012796} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.826146] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.826426] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 1e9d39d5-40fd-40b7-9421-94e0bff0314e/1e9d39d5-40fd-40b7-9421-94e0bff0314e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1140.826714] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afed04f2-8172-47d4-afb4-4003a701c66a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.834416] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1140.834416] env[70020]: value = "task-3619043" [ 1140.834416] env[70020]: _type = "Task" [ 1140.834416] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.843289] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.845992] env[70020]: DEBUG nova.network.neutron [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.912938] env[70020]: DEBUG nova.compute.utils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1141.007764] env[70020]: DEBUG nova.network.neutron [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.064941] env[70020]: DEBUG nova.compute.manager [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Received event network-vif-plugged-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.065365] env[70020]: DEBUG oslo_concurrency.lockutils [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] Acquiring lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.065731] env[70020]: DEBUG oslo_concurrency.lockutils 
[req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] Lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.066072] env[70020]: DEBUG oslo_concurrency.lockutils [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] Lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.066380] env[70020]: DEBUG nova.compute.manager [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] No waiting events found dispatching network-vif-plugged-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.066671] env[70020]: WARNING nova.compute.manager [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Received unexpected event network-vif-plugged-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 for instance with vm_state building and task_state spawning. [ 1141.066975] env[70020]: DEBUG nova.compute.manager [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Received event network-changed-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.067275] env[70020]: DEBUG nova.compute.manager [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Refreshing instance network info cache due to event network-changed-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1141.067570] env[70020]: DEBUG oslo_concurrency.lockutils [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.153394] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.342s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.153650] env[70020]: INFO nova.compute.manager [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Migrating [ 1141.160784] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.947s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.181050] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f9e4a3-1de2-3efa-6c1f-3bf6d7571c1c, 'name': SearchDatastore_Task, 'duration_secs': 0.025107} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.181343] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.181604] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1141.181887] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-369f7df0-746b-483e-90b1-2597c2093c65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.191253] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1141.191253] env[70020]: value = "task-3619044" [ 1141.191253] env[70020]: _type = "Task" [ 1141.191253] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.200081] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619044, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.343895] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619043, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.416028] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.510644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.511018] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Instance network_info: |[{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.511375] env[70020]: DEBUG oslo_concurrency.lockutils [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.511562] env[70020]: DEBUG nova.network.neutron [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Refreshing network info cache for port 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.512891] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:81:fe', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.521989] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.524032] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.526594] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16cd0563-3e7e-4e48-9a6d-0cf237a22948 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.541202] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.541438] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.548528] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.548528] env[70020]: value = "task-3619045" [ 1141.548528] env[70020]: _type = "Task" [ 1141.548528] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.559923] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619045, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.672891] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.673137] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.673274] env[70020]: DEBUG nova.network.neutron [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.678036] env[70020]: INFO nova.compute.claims [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.708962] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619044, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.845149] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619043, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.009713} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.845428] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 1e9d39d5-40fd-40b7-9421-94e0bff0314e/1e9d39d5-40fd-40b7-9421-94e0bff0314e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1141.845686] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1141.845964] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9ada4c7-3eb2-4b66-a807-caf0bbc0de01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.852326] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1141.852326] env[70020]: value = "task-3619046" [ 1141.852326] env[70020]: _type = "Task" [ 1141.852326] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.862021] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.043665] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1142.060046] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619045, 'name': CreateVM_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.187804] env[70020]: INFO nova.compute.resource_tracker [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating resource usage from migration 4b514c28-2530-40de-84ec-b4948bca618a [ 1142.201698] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619044, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805898} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.202170] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1142.202429] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.202708] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78e6fbce-029e-43a1-83aa-2c6ed01b723a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.209381] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1142.209381] env[70020]: value = "task-3619047" [ 1142.209381] env[70020]: _type = "Task" [ 1142.209381] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.220942] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619047, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.249039] env[70020]: DEBUG nova.network.neutron [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updated VIF entry in instance network info cache for port 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.249418] env[70020]: DEBUG nova.network.neutron [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.364503] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071964} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.364503] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.365355] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2835d99-bd39-4ee1-94e5-2c1e7488f945 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.392314] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 1e9d39d5-40fd-40b7-9421-94e0bff0314e/1e9d39d5-40fd-40b7-9421-94e0bff0314e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.397425] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a037278-2047-4331-a3b3-334ba0976939 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.418055] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1142.418055] env[70020]: value = "task-3619048" [ 1142.418055] env[70020]: _type = "Task" [ 1142.418055] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.427938] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619048, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.438999] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b06716f-1efd-4d47-a1cc-cee4ed2c114c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.445191] env[70020]: DEBUG nova.network.neutron [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [{"id": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "address": "fa:16:3e:34:e6:98", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff971a6c-7f", "ovs_interfaceid": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.450976] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbec393-4d65-465f-b641-a036b31787ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.484124] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ca6f8b-06b6-4aa2-8a5a-d446bf341b44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.491519] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.491764] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.491986] env[70020]: INFO nova.compute.manager [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f 
tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Attaching volume 9e3399bb-294d-4dc8-865a-5a6fc34ad741 to /dev/sdb [ 1142.495025] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2016f4-ff33-43d9-81d9-21ece63d6929 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.511113] env[70020]: DEBUG nova.compute.provider_tree [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.527317] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19ae059-8235-4392-88af-9892b5bf16cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.533777] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a3a206-ae79-4021-9cd7-30198ddbe3d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.547319] env[70020]: DEBUG nova.virt.block_device [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating existing volume attachment record: 3fb8039d-52c7-4528-8697-3a9fbb2a3884 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1142.563430] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619045, 'name': CreateVM_Task, 'duration_secs': 1.006658} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.563430] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.564190] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.564190] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.564447] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.564692] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fd4ad14-2801-42a9-95ec-0745783259c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.568269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.569743] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1142.569743] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bd959a-8cbb-918e-cfb2-c348a93044be" [ 1142.569743] env[70020]: _type = "Task" [ 1142.569743] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.577600] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd959a-8cbb-918e-cfb2-c348a93044be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.721232] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069317} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.721457] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.722222] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda17893-66d5-4cc7-9ee3-b68d245baae8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.741087] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.741331] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc7dcd4f-9d04-4620-8dc7-90bebb395115 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.755052] env[70020]: DEBUG oslo_concurrency.lockutils [req-25c0c49c-bf3e-4adb-aa03-f7c50d8da443 req-c2506c8d-af77-422f-810e-0e02da4a4f1e service nova] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.759723] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1142.759723] env[70020]: value = "task-3619050" [ 1142.759723] env[70020]: _type = "Task" [ 1142.759723] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.769706] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619050, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.928740] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619048, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.951458] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.032078] env[70020]: ERROR nova.scheduler.client.report [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [req-951a9624-247a-4fcf-9ce2-b868d3bbf138] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-951a9624-247a-4fcf-9ce2-b868d3bbf138"}]} [ 1143.049835] env[70020]: DEBUG nova.scheduler.client.report [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1143.066732] env[70020]: DEBUG nova.scheduler.client.report [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1143.067032] env[70020]: DEBUG nova.compute.provider_tree [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1143.078942] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: 
{'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd959a-8cbb-918e-cfb2-c348a93044be, 'name': SearchDatastore_Task, 'duration_secs': 0.037812} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.079864] env[70020]: DEBUG nova.scheduler.client.report [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1143.081781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.082023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.082274] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.082419] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.082597] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.083063] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed50c424-55ce-48f1-a2dd-74ab37db0474 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.094845] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.095029] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.095938] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4657620-faed-4a2c-a1f6-37d79bd1361c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.100664] env[70020]: DEBUG nova.scheduler.client.report [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1143.104329] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1143.104329] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521258f7-a3bc-1fdd-3d08-167872624886" [ 1143.104329] env[70020]: _type = "Task" [ 1143.104329] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.111709] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521258f7-a3bc-1fdd-3d08-167872624886, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.270829] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619050, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.301480] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f450579-12a1-466b-ba68-3792f8eee510 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.308805] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505c6ff3-9c2b-462a-b2e1-0f8727c45dd1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.340071] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc8a6e4-587a-4ea6-920a-37aaa4c1e266 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.347070] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6780a2-4aa3-4f82-addc-38a3faa2d251 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.359835] env[70020]: DEBUG nova.compute.provider_tree [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1143.428247] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619048, 'name': ReconfigVM_Task, 'duration_secs': 1.002475} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.428563] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 1e9d39d5-40fd-40b7-9421-94e0bff0314e/1e9d39d5-40fd-40b7-9421-94e0bff0314e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.429265] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b23e5a8-6e4c-4dd1-8558-d0708568e2b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.435049] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1143.435049] env[70020]: value = "task-3619053" [ 1143.435049] env[70020]: _type = "Task" [ 1143.435049] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.442818] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619053, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.614467] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521258f7-a3bc-1fdd-3d08-167872624886, 'name': SearchDatastore_Task, 'duration_secs': 0.030141} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.615337] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ac053cd-600b-4e4f-af75-66c66d63173b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.620929] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1143.620929] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52900903-39c6-b372-46a0-3becf2f335d3" [ 1143.620929] env[70020]: _type = "Task" [ 1143.620929] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.629593] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52900903-39c6-b372-46a0-3becf2f335d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.772842] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619050, 'name': ReconfigVM_Task, 'duration_secs': 0.845759} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.772842] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Reconfigured VM instance instance-0000006d to attach disk [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518/d0252c4e-0991-45b9-bf0b-b8e41093e518.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.772842] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fe8d8f7-0c21-4a28-b5fb-9a458ffebc50 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.778065] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1143.778065] env[70020]: value = "task-3619054" [ 1143.778065] env[70020]: _type = "Task" [ 1143.778065] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.788418] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619054, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.894463] env[70020]: DEBUG nova.scheduler.client.report [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1143.894775] env[70020]: DEBUG nova.compute.provider_tree [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 158 to 159 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1143.894993] env[70020]: DEBUG nova.compute.provider_tree [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1143.946144] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619053, 'name': Rename_Task, 'duration_secs': 0.239446} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.946439] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1143.946699] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28b65238-01ef-4e35-a5ec-5e30daf36827 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.953923] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1143.953923] env[70020]: value = "task-3619055" [ 1143.953923] env[70020]: _type = "Task" [ 1143.953923] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.964350] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.134019] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52900903-39c6-b372-46a0-3becf2f335d3, 'name': SearchDatastore_Task, 'duration_secs': 0.017516} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.134451] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.134578] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1144.134853] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e6db218-8d2f-4419-9f93-57d15f6af0fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.142249] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1144.142249] env[70020]: value = "task-3619056" [ 1144.142249] env[70020]: _type = "Task" [ 1144.142249] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.150824] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.290566] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619054, 'name': Rename_Task, 'duration_secs': 0.282058} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.290967] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1144.291120] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64206043-c477-459f-acf0-966e24900aa8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.297729] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1144.297729] env[70020]: value = "task-3619057" [ 1144.297729] env[70020]: _type = "Task" [ 1144.297729] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.306306] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619057, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.400739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.240s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.401074] env[70020]: INFO nova.compute.manager [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Migrating [ 1144.408681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.227s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.408949] env[70020]: DEBUG nova.objects.instance [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1144.468331] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619055, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.469708] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc73e236-454d-4d47-ac55-7a39d640ccf6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.490593] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1144.653159] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619056, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.807252] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619057, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.925250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.925551] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.925833] env[70020]: DEBUG nova.network.neutron [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.965198] env[70020]: DEBUG oslo_vmware.api [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619055, 'name': PowerOnVM_Task, 'duration_secs': 0.68578} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.965529] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1144.965817] env[70020]: INFO nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Took 8.27 seconds to spawn the instance on the hypervisor. 
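The repeated "Waiting for the task: (returnval){ value = "task-..." }", "Task: {...} progress is N%" and "completed successfully" records throughout this stretch of the log are produced by oslo.vmware's task polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal illustrative sketch of that pattern follows; it is not Nova's own code, and the vCenter host, credentials and managed object reference ('vc.example.invalid', 'vm-12345') are placeholders, not values from this log.

    # Sketch only: drive an asynchronous vSphere task the way these log
    # records show (assumes a reachable vCenter; all identifiers are
    # placeholders).
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.invalid',            # placeholder vCenter host
        'user', 'password',              # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)

    # Placeholder managed object reference for an existing VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Invoke an asynchronous vSphere task (e.g. PowerOnVM_Task) and wait for
    # it; wait_for_task() polls TaskInfo every task_poll_interval seconds,
    # logging progress until the task succeeds or raising if it fails.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

Similarly, the "Acquiring lock ... by ...", "acquired ... :: waited" and ""released" ... :: held" records come from oslo.concurrency's lock helpers: the synchronized decorator's "inner" wrapper and the lock() context manager in lockutils.py. A hedged sketch of both forms, reusing lock names that appear in the records above:

    # Sketch only: the two lockutils usage patterns visible in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('9e7bd10b-3a78-48d8-9b66-e3646635be6d')
    def do_reserve():
        # Critical section; the decorator's "inner" wrapper emits the
        # acquired/waited/held DEBUG messages seen above.
        pass

    do_reserve()

    # The Acquiring/Acquired/Releasing "refresh_cache-*" lines use the
    # context-manager form instead.
    with lockutils.lock('refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad'):
        pass  # e.g. refresh the instance's network info cache under the lock
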
[ 1144.966107] env[70020]: DEBUG nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.966940] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a575cd-518d-416e-9502-db92d7247e56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.997938] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1144.998327] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5979e78a-1325-4844-b0c7-b809a630c3e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.006616] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1145.006616] env[70020]: value = "task-3619058" [ 1145.006616] env[70020]: _type = "Task" [ 1145.006616] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.015738] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619058, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.153849] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619056, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569061} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.154137] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.154357] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1145.154612] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bfa239c-32fc-4c95-9743-320ab9873631 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.161337] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1145.161337] env[70020]: value = "task-3619059" [ 1145.161337] env[70020]: _type = "Task" [ 1145.161337] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.170643] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619059, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.244911] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.245223] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.308391] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619057, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.428952] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b46ecc24-3395-4e04-95b1-ca8b7bab5955 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.432207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.864s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.433732] env[70020]: INFO nova.compute.claims [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.488470] env[70020]: INFO nova.compute.manager [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Took 23.55 seconds to build instance. [ 1145.516457] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619058, 'name': PowerOffVM_Task, 'duration_secs': 0.282459} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.517073] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1145.517073] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1145.627921] env[70020]: DEBUG nova.network.neutron [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance_info_cache with network_info: [{"id": "4332b789-1993-4df4-8099-15089bf507db", "address": "fa:16:3e:1a:0a:5b", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4332b789-19", "ovs_interfaceid": "4332b789-1993-4df4-8099-15089bf507db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.673683] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.201846} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.673960] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1145.674821] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1aa39b-b950-45da-936a-3eec154074a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.696886] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.697194] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfb7a8bc-df69-4bd3-8789-61b9e4289cb7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.719369] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1145.719369] env[70020]: value = "task-3619060" [ 1145.719369] env[70020]: _type = "Task" [ 1145.719369] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.727634] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619060, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.748937] env[70020]: DEBUG nova.compute.utils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1145.808723] env[70020]: DEBUG oslo_vmware.api [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619057, 'name': PowerOnVM_Task, 'duration_secs': 1.090859} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.808991] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1145.809208] env[70020]: DEBUG nova.compute.manager [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1145.809958] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cc4b26-f81b-4dcc-a52d-432887b00aa6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.990349] env[70020]: DEBUG oslo_concurrency.lockutils [None req-25e76371-c3ad-4a75-bfb4-ca621e6e06af tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.058s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.025197] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1146.025461] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1146.025648] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1146.025940] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1146.026130] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 
tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1146.026280] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1146.026486] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1146.026642] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1146.026914] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1146.027115] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1146.027293] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1146.032271] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2dc7752-dc29-45e4-bcd9-3a2885dc39ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.048520] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1146.048520] env[70020]: value = "task-3619061" [ 1146.048520] env[70020]: _type = "Task" [ 1146.048520] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.057114] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619061, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.130625] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.230573] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619060, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.251515] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.324904] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.561758] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619061, 'name': ReconfigVM_Task, 'duration_secs': 0.363047} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.562114] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1146.638672] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494cc55c-ee46-409d-bc47-a65a2493f0bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.647203] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26186639-81c2-42a7-aed5-f108657770b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.680236] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be840ddf-fee8-4df2-8ed8-2a5ebe7f2d96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.687874] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7b42ae-375a-4339-816e-ac718888c45f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.701714] env[70020]: DEBUG nova.compute.provider_tree [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.728862] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619060, 'name': ReconfigVM_Task, 'duration_secs': 0.550477} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.729091] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.729687] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b132b4a-c7d2-4c82-96be-1c065e0fb538 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.735964] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1146.735964] env[70020]: value = "task-3619062" [ 1146.735964] env[70020]: _type = "Task" [ 1146.735964] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.743541] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619062, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.016556] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.016851] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.017074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.017260] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.017424] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.019424] env[70020]: INFO nova.compute.manager [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Terminating instance [ 1147.070454] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1147.070708] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1147.070881] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1147.071087] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1147.071237] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1147.071380] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1147.071579] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1147.071733] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1147.071896] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1147.072086] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1147.072297] env[70020]: DEBUG nova.virt.hardware [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 
tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1147.077744] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.078014] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-290ec152-bc5a-43e8-ba40-f6307405d480 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.095495] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1147.095495] env[70020]: value = "task-3619063" [ 1147.095495] env[70020]: _type = "Task" [ 1147.095495] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.103083] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619063, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.204511] env[70020]: DEBUG nova.scheduler.client.report [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.245284] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619062, 'name': Rename_Task, 'duration_secs': 0.187136} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.245514] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.245934] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-708f76a3-5062-4081-bbb5-30dc2300ce2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.252391] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1147.252391] env[70020]: value = "task-3619064" [ 1147.252391] env[70020]: _type = "Task" [ 1147.252391] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.260218] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.324074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.324366] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.324603] env[70020]: INFO nova.compute.manager [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Attaching volume 68b7bed5-35e8-4ee8-81ea-460623d98a8b to /dev/sdb [ 1147.356580] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f94e03-a401-4818-8148-1e9b8e1469d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.364168] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8994c2ca-7ee1-41d2-84e1-44fe52dcd8d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.377723] env[70020]: DEBUG nova.virt.block_device [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 
f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updating existing volume attachment record: 3fa49fbd-f767-4485-bdee-d9b11da1f192 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1147.498448] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "d0252c4e-0991-45b9-bf0b-b8e41093e518" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.498614] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "d0252c4e-0991-45b9-bf0b-b8e41093e518" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.498856] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "d0252c4e-0991-45b9-bf0b-b8e41093e518-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.499088] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "d0252c4e-0991-45b9-bf0b-b8e41093e518-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.499297] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "d0252c4e-0991-45b9-bf0b-b8e41093e518-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.501592] env[70020]: INFO nova.compute.manager [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Terminating instance [ 1147.522977] env[70020]: DEBUG nova.compute.manager [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1147.523223] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1147.524143] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd086757-6909-44b2-9955-4fe7515c5b58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.531304] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.531532] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-560c7931-3ea4-47b1-b5c2-6af954595c27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.538714] env[70020]: DEBUG oslo_vmware.api [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1147.538714] env[70020]: value = "task-3619066" [ 1147.538714] env[70020]: _type = "Task" [ 1147.538714] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.547874] env[70020]: DEBUG oslo_vmware.api [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.606985] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619063, 'name': ReconfigVM_Task, 'duration_secs': 0.170056} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.607346] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1147.608261] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7727139-d48e-4e1e-945c-ee3f98d020b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.631244] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32/ce4796b0-4ad2-4468-9898-aaedce6dcd32.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1147.631518] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5282de64-335c-47ee-8479-b648755b6c58 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.650211] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1147.650211] env[70020]: value = "task-3619067" [ 1147.650211] env[70020]: _type = "Task" [ 1147.650211] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.652268] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ef6a0a-3673-4f0d-9a44-532e91ffb8c1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.676983] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.684639] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619067, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.710363] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.712047] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1147.713792] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.389s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.714061] env[70020]: DEBUG nova.objects.instance [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1147.764997] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619064, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.005786] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "refresh_cache-d0252c4e-0991-45b9-bf0b-b8e41093e518" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.005999] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "refresh_cache-d0252c4e-0991-45b9-bf0b-b8e41093e518" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.006199] env[70020]: DEBUG nova.network.neutron [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.047847] env[70020]: DEBUG oslo_vmware.api [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619066, 'name': PowerOffVM_Task, 'duration_secs': 0.176147} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.048121] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1148.048285] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1148.048536] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21035a20-1959-4445-91b6-4c73c854f52a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.134295] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1148.134519] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1148.134697] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore2] 1e9d39d5-40fd-40b7-9421-94e0bff0314e {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1148.134998] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e697c797-110d-4147-b725-ddabff0b482d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.141245] env[70020]: DEBUG oslo_vmware.api [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1148.141245] env[70020]: value = "task-3619069" [ 1148.141245] env[70020]: _type = "Task" [ 1148.141245] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.150266] env[70020]: DEBUG oslo_vmware.api [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.160153] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619067, 'name': ReconfigVM_Task, 'duration_secs': 0.280311} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.160408] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Reconfigured VM instance instance-0000006a to attach disk [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32/ce4796b0-4ad2-4468-9898-aaedce6dcd32.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1148.160662] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1148.187355] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.187524] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8033589-e89f-49e7-a1f0-8d5c5600121e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.193179] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1148.193179] env[70020]: value = "task-3619070" [ 1148.193179] env[70020]: _type = "Task" [ 1148.193179] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.201348] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.218508] env[70020]: DEBUG nova.compute.utils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1148.222565] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1148.222727] env[70020]: DEBUG nova.network.neutron [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1148.263471] env[70020]: DEBUG oslo_vmware.api [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619064, 'name': PowerOnVM_Task, 'duration_secs': 0.568851} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.263790] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.264052] env[70020]: INFO nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Took 9.25 seconds to spawn the instance on the hypervisor. [ 1148.264254] env[70020]: DEBUG nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.265015] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2c9e46-bd25-42bd-8849-4f0fa99c1c79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.270990] env[70020]: DEBUG nova.policy [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1148.527896] env[70020]: DEBUG nova.network.neutron [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1148.593413] env[70020]: DEBUG nova.network.neutron [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.631186] env[70020]: DEBUG nova.network.neutron [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Successfully created port: 0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1148.651017] env[70020]: DEBUG oslo_vmware.api [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369991} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.651280] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.651466] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1148.651640] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1148.651809] env[70020]: INFO nova.compute.manager [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1148.652059] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1148.652251] env[70020]: DEBUG nova.compute.manager [-] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1148.652343] env[70020]: DEBUG nova.network.neutron [-] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1148.667769] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6ac5cc-021a-4dde-b82b-b47389170a83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.688397] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f84f2c-1ed6-4a86-b224-968bebdb0c4e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.706457] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1148.720016] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619070, 'name': PowerOffVM_Task, 'duration_secs': 0.315226} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.720016] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1148.720016] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1148.722701] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1148.725907] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4ce959b0-56fc-4330-bcb7-0f62cabdc063 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.788342] env[70020]: INFO nova.compute.manager [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Took 23.68 seconds to build instance. [ 1149.062173] env[70020]: DEBUG nova.compute.manager [req-fed5840e-5ced-4929-87b5-ebe095c3a2a4 req-30c79019-0d33-4826-b50d-1a14a1656390 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Received event network-vif-deleted-7716680a-01d2-46d9-8812-272c03afab45 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.062375] env[70020]: INFO nova.compute.manager [req-fed5840e-5ced-4929-87b5-ebe095c3a2a4 req-30c79019-0d33-4826-b50d-1a14a1656390 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Neutron deleted interface 7716680a-01d2-46d9-8812-272c03afab45; detaching it from the instance and deleting it from the info cache [ 1149.062550] env[70020]: DEBUG nova.network.neutron [req-fed5840e-5ced-4929-87b5-ebe095c3a2a4 req-30c79019-0d33-4826-b50d-1a14a1656390 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.099269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "refresh_cache-d0252c4e-0991-45b9-bf0b-b8e41093e518" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.099551] env[70020]: DEBUG nova.compute.manager [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1149.099741] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1149.101321] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa5b20a-4f83-4d72-aea7-ff2722d48f1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.110306] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1149.110600] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5fb8abe-c91a-4658-827b-4d14dba130bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.116319] env[70020]: DEBUG oslo_vmware.api [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1149.116319] env[70020]: value = "task-3619072" [ 1149.116319] env[70020]: _type = "Task" [ 1149.116319] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.124063] env[70020]: DEBUG oslo_vmware.api [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619072, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.225026] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.225229] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.225376] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.225654] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.225901] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.226146] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.226429] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.226685] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.226973] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible
topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.227859] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.227859] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.236782] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7de896a0-31ac-4e5d-842f-26698bd0d671 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.257336] env[70020]: DEBUG nova.network.neutron [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Port ff971a6c-7fab-4c04-a75d-259986b9fce0 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1149.264705] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1149.264705] env[70020]: value = "task-3619073" [ 1149.264705] env[70020]: _type = "Task" [ 1149.264705] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.276723] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619073, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.290711] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5746a2eb-4a68-4653-bfc3-47ab4eb7e1e9 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.193s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.540731] env[70020]: DEBUG nova.network.neutron [-] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.564674] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6811bac2-9e38-4afe-b654-e768ee1014eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.574097] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e20149-1f97-4b4c-8ab0-82245ef895f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.612744] env[70020]: DEBUG nova.compute.manager [req-fed5840e-5ced-4929-87b5-ebe095c3a2a4 req-30c79019-0d33-4826-b50d-1a14a1656390 service nova] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Detach interface failed, port_id=7716680a-01d2-46d9-8812-272c03afab45, reason: Instance 1e9d39d5-40fd-40b7-9421-94e0bff0314e could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1149.625382] env[70020]: DEBUG oslo_vmware.api [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619072, 'name': PowerOffVM_Task, 'duration_secs': 0.123211} completed successfully.
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.625595] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.625792] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.626148] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-748c5197-1def-4088-bd41-4b2d63657f0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.649480] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.649715] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.649855] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleting the datastore file [datastore1] d0252c4e-0991-45b9-bf0b-b8e41093e518 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.650118] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce6eccad-faa0-4752-80c4-e43a1b9ec7ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.657758] env[70020]: DEBUG oslo_vmware.api [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1149.657758] env[70020]: value = "task-3619076" [ 1149.657758] env[70020]: _type = "Task" [ 1149.657758] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.666059] env[70020]: DEBUG oslo_vmware.api [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.760387] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1149.775345] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619073, 'name': ReconfigVM_Task, 'duration_secs': 0.251763} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.775525] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.796836] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=<?>,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-25T22:58:54Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.797131] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.797277] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.797456] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.797601] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.797742] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020)
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.797950] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.798201] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.798294] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.798457] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.798627] env[70020]: DEBUG nova.virt.hardware [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.799788] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d056f9-2ff1-41a5-9524-c2e0d0bbbc49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.808959] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c4496d-2f91-48b2-b653-6025a8df1b3e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.826948] env[70020]: DEBUG nova.compute.manager [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Received event network-changed-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.827160] env[70020]: DEBUG nova.compute.manager [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Refreshing instance network info cache due to event network-changed-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1149.827370] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.827512] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.827668] env[70020]: DEBUG nova.network.neutron [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Refreshing network info cache for port 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.043010] env[70020]: INFO nova.compute.manager [-] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Took 1.39 seconds to deallocate network for instance. [ 1150.135885] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.135885] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.167871] env[70020]: DEBUG oslo_vmware.api [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140031} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.168163] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1150.168353] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1150.168531] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1150.168696] env[70020]: INFO nova.compute.manager [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1150.168927] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1150.169123] env[70020]: DEBUG nova.compute.manager [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1150.169220] env[70020]: DEBUG nova.network.neutron [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1150.189868] env[70020]: DEBUG nova.network.neutron [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance cache missing network info.
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.280380] env[70020]: DEBUG nova.network.neutron [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Successfully updated port: 0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1150.284020] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1150.284020] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1150.284020] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1150.284020] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1150.284020] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1150.284020] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1150.284285] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1150.284331] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771
tempest-DeleteServersTestJSON-1971284771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1150.284489] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1150.284763] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1150.284966] env[70020]: DEBUG nova.virt.hardware [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1150.290778] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1150.294123] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8350a3c0-26b2-4799-ac13-4c6527cc1f40 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.311302] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.311517] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.311678] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.317888] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1150.317888] env[70020]: value =
"task-3619077" [ 1150.317888] env[70020]: _type = "Task" [ 1150.317888] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.326941] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619077, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.544874] env[70020]: DEBUG nova.network.neutron [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updated VIF entry in instance network info cache for port 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.545403] env[70020]: DEBUG nova.network.neutron [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.549294] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.549871] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.549949] env[70020]: DEBUG nova.objects.instance [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] 
Lazy-loading 'resources' on Instance uuid 1e9d39d5-40fd-40b7-9421-94e0bff0314e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.643858] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1150.643858] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.692373] env[70020]: DEBUG nova.network.neutron [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.792925] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.793104] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.793263] env[70020]: DEBUG nova.network.neutron [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.828408] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619077, 'name': ReconfigVM_Task, 'duration_secs': 0.332528} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.828684] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1150.829477] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c9aafd-ec57-4ebc-885f-4ec18061ecb9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.852182] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f/c0a78ace-307e-4156-beb3-a53061acff7f.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.852464] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5cf1951-aef5-4cf2-858a-b19e57850638 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.871086] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1150.871086] env[70020]: value = "task-3619078" [ 1150.871086] env[70020]: _type = "Task" [ 1150.871086] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.878835] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619078, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.048437] env[70020]: DEBUG oslo_concurrency.lockutils [req-4bab1df9-0c8e-436b-9bff-c246d2cb6323 req-20bac43c-879d-4454-a43b-1d623ee86833 service nova] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.089297] env[70020]: DEBUG nova.compute.manager [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-vif-plugged-0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.089459] env[70020]: DEBUG oslo_concurrency.lockutils [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.089753] env[70020]: DEBUG oslo_concurrency.lockutils [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.089871] env[70020]: DEBUG oslo_concurrency.lockutils [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.090053] env[70020]: DEBUG nova.compute.manager [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] No waiting events found dispatching network-vif-plugged-0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1151.090227] env[70020]: WARNING nova.compute.manager [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received unexpected event network-vif-plugged-0c413503-8ce1-454a-a6b0-3fb75d647a04 for instance with vm_state building and task_state spawning. [ 1151.090383] env[70020]: DEBUG nova.compute.manager [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-changed-0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.090536] env[70020]: DEBUG nova.compute.manager [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing instance network info cache due to event network-changed-0c413503-8ce1-454a-a6b0-3fb75d647a04.
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.090695] env[70020]: DEBUG oslo_concurrency.lockutils [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.147117] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.194864] env[70020]: INFO nova.compute.manager [-] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Took 1.03 seconds to deallocate network for instance. [ 1151.312105] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00523c0e-2e1a-42f9-bbd2-ff63552a1261 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.322394] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32053b81-2618-4986-aba7-7f426eabedea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.352956] env[70020]: DEBUG nova.network.neutron [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1151.355404] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.355571] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.355755] env[70020]: DEBUG nova.network.neutron [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1151.357410] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7ec75f-d23c-4350-9994-e64decd0a38b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.367017] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1921f79-c08b-42f2-9a23-f41db09333d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.384631] env[70020]: DEBUG 
nova.compute.provider_tree [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.391889] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619078, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.501485] env[70020]: DEBUG nova.network.neutron [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.599815] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1151.600158] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721816', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'name': 'volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9e7bd10b-3a78-48d8-9b66-e3646635be6d', 'attached_at': '', 'detached_at': '', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'serial': '9e3399bb-294d-4dc8-865a-5a6fc34ad741'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1151.601107] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871125a0-0195-4216-9fd6-8204c98be0f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.619328] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb6c043-4265-4718-b8f4-e3f52c2b448c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.645211] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741/volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.645211] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4305ab76-e151-4c46-9fe2-29a3a99a3e94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.662752] env[70020]: DEBUG oslo_vmware.api [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1151.662752] env[70020]: value = "task-3619079" [ 1151.662752] env[70020]: _type = "Task" [ 1151.662752] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.670441] env[70020]: DEBUG oslo_vmware.api [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619079, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.703873] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.881057] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619078, 'name': ReconfigVM_Task, 'duration_secs': 0.888346} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.881304] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f/c0a78ace-307e-4156-beb3-a53061acff7f.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1151.881585] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1151.887139] env[70020]: DEBUG nova.scheduler.client.report [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.922160] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1151.922403] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721817', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'name': 'volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f9d4837f-0e3f-4a83-9055-04d17ef3eb23', 'attached_at': '', 'detached_at': '', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'serial': '68b7bed5-35e8-4ee8-81ea-460623d98a8b'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1151.923270] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6dd7de-35a5-4e51-868b-d5c48e4f3b0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.941920] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38a4325-f2f7-4867-a6e1-6175fcd4f60f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.969215] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b/volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.971848] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-360d845f-e78a-4e42-afdf-3badc50ebefd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.990322] env[70020]: DEBUG oslo_vmware.api [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1151.990322] env[70020]: value = "task-3619080" [ 1151.990322] env[70020]: _type = "Task" [ 1151.990322] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.998849] env[70020]: DEBUG oslo_vmware.api [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619080, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.004426] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.004836] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Instance network_info: |[{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1152.005144] env[70020]: DEBUG oslo_concurrency.lockutils [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.005327] env[70020]: DEBUG nova.network.neutron [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing network info cache for port 0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1152.006970] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:8d:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c413503-8ce1-454a-a6b0-3fb75d647a04', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1152.014404] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.017927] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1152.018483] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de51521f-7def-4e34-bbdf-b5245539e06f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.038635] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1152.038635] env[70020]: value = "task-3619081" [ 1152.038635] env[70020]: _type = "Task" [ 1152.038635] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.048969] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619081, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.094427] env[70020]: DEBUG nova.network.neutron [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [{"id": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "address": "fa:16:3e:34:e6:98", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff971a6c-7f", "ovs_interfaceid": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.173302] env[70020]: DEBUG oslo_vmware.api [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619079, 'name': ReconfigVM_Task, 'duration_secs': 0.350866} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.173504] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741/volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.178596] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f0e6b3d-ec7a-47f8-afff-356ecdd6e67b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.193879] env[70020]: DEBUG oslo_vmware.api [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1152.193879] env[70020]: value = "task-3619082" [ 1152.193879] env[70020]: _type = "Task" [ 1152.193879] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.202069] env[70020]: DEBUG oslo_vmware.api [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619082, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.388539] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a17d47-48bf-41d5-b9b3-84c6d72a8827 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.391642] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.393676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.247s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.393858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.394015] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1152.394305] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.691s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.394463] env[70020]: DEBUG nova.objects.instance [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lazy-loading 'resources' on Instance uuid d0252c4e-0991-45b9-bf0b-b8e41093e518 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.396244] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd1c511-264d-4209-ab46-14460e3145d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.413944] env[70020]: INFO nova.scheduler.client.report [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance 1e9d39d5-40fd-40b7-9421-94e0bff0314e [ 1152.418572] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0676ce68-5a51-4822-80ef-06b8deafe879 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.424921] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783e69fd-3075-4708-8fed-db7127495763 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.443101] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.458352] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd15e0b2-4eb9-4a86-9292-53f4c8c598b2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.465203] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7d092c-7f59-4492-95f8-ff0191f6989e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.496479] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179553MB free_disk=75GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1152.496657] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.504618] env[70020]: DEBUG oslo_vmware.api [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 
tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619080, 'name': ReconfigVM_Task, 'duration_secs': 0.357531} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.504941] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Reconfigured VM instance instance-00000069 to attach disk [datastore2] volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b/volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.509694] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-334de578-513b-4c1f-93f4-38dede2e9a96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.526548] env[70020]: DEBUG oslo_vmware.api [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1152.526548] env[70020]: value = "task-3619083" [ 1152.526548] env[70020]: _type = "Task" [ 1152.526548] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.534294] env[70020]: DEBUG oslo_vmware.api [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.546327] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619081, 'name': CreateVM_Task, 'duration_secs': 0.343385} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.546538] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1152.547227] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.547405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.547720] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1152.549964] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7415de24-fbcc-4939-9eee-96e3aed16c19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.555361] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1152.555361] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bd2dc0-8826-ed51-213f-4862b1e0bb69" [ 1152.555361] env[70020]: _type = "Task" [ 1152.555361] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.562798] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd2dc0-8826-ed51-213f-4862b1e0bb69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.597067] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.703188] env[70020]: DEBUG oslo_vmware.api [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619082, 'name': ReconfigVM_Task, 'duration_secs': 0.17363} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.703515] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721816', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'name': 'volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9e7bd10b-3a78-48d8-9b66-e3646635be6d', 'attached_at': '', 'detached_at': '', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'serial': '9e3399bb-294d-4dc8-865a-5a6fc34ad741'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1152.710740] env[70020]: DEBUG nova.network.neutron [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updated VIF entry in instance network info cache for port 0c413503-8ce1-454a-a6b0-3fb75d647a04. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.711078] env[70020]: DEBUG nova.network.neutron [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.921037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e2e55643-1cfd-4f61-accb-f43cef2abc6e tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "1e9d39d5-40fd-40b7-9421-94e0bff0314e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.904s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.991929] env[70020]: DEBUG nova.network.neutron [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] 
[instance: c0a78ace-307e-4156-beb3-a53061acff7f] Port 4332b789-1993-4df4-8099-15089bf507db binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1153.035539] env[70020]: DEBUG oslo_vmware.api [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619083, 'name': ReconfigVM_Task, 'duration_secs': 0.148583} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.037884] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721817', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'name': 'volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f9d4837f-0e3f-4a83-9055-04d17ef3eb23', 'attached_at': '', 'detached_at': '', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'serial': '68b7bed5-35e8-4ee8-81ea-460623d98a8b'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1153.068032] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd2dc0-8826-ed51-213f-4862b1e0bb69, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.068032] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.068268] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1153.068348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.068502] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.068681] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1153.068940] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c92fdfc2-4f10-45fa-8d8c-018d0ac61902 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.078795] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1153.078976] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1153.079697] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a38a516-558d-4683-a43e-1a51d67d10aa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.086912] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1153.086912] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526c2615-8ea2-8cd9-dc54-3a26010fb3ad" [ 1153.086912] env[70020]: _type = "Task" [ 1153.086912] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.097513] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526c2615-8ea2-8cd9-dc54-3a26010fb3ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.108234] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a3b775-7718-4a94-abc6-bbcfcc78f6c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.116346] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85f3819-c1e8-40ee-a99f-218f35306c18 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.120710] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d926b17d-e445-4cff-8143-fd0ce73746de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.164945] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae4d2a5-d9bd-4768-aab4-569a07da6dcc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.168903] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e24dbc-4760-415e-b794-71022a1baf26 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.176365] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1153.182905] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652b220e-77e1-47b0-af56-5bdd9a178205 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.196339] env[70020]: DEBUG nova.compute.provider_tree [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 
tempest-ServerShowV247Test-567239629-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.214038] env[70020]: DEBUG oslo_concurrency.lockutils [req-f9f8fcf6-1842-4445-944d-53519ce5da52 req-31f5ae4b-6f50-46c5-97ce-8552d480820b service nova] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.597185] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526c2615-8ea2-8cd9-dc54-3a26010fb3ad, 'name': SearchDatastore_Task, 'duration_secs': 0.01764} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.597962] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d523fd03-1fb6-4e02-84cb-fcb86c45984c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.603626] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1153.603626] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527f4f83-2e4f-5a35-7ee1-83d58bcb1409" [ 1153.603626] env[70020]: _type = "Task" [ 1153.603626] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.610978] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527f4f83-2e4f-5a35-7ee1-83d58bcb1409, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.683979] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.684334] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81ea1ef3-3200-4bef-aa29-479ac0dac382 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.691743] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1153.691743] env[70020]: value = "task-3619084" [ 1153.691743] env[70020]: _type = "Task" [ 1153.691743] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.698848] env[70020]: DEBUG nova.scheduler.client.report [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.708401] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619084, 'name': PowerOnVM_Task} progress is 33%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.742584] env[70020]: DEBUG nova.objects.instance [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'flavor' on Instance uuid 9e7bd10b-3a78-48d8-9b66-e3646635be6d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.015470] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.015805] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.015990] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.052621] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.052876] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed 
tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.070669] env[70020]: DEBUG nova.objects.instance [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'flavor' on Instance uuid f9d4837f-0e3f-4a83-9055-04d17ef3eb23 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.115513] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527f4f83-2e4f-5a35-7ee1-83d58bcb1409, 'name': SearchDatastore_Task, 'duration_secs': 0.028726} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.115950] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.116092] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] cc46e905-958e-4dc3-8f83-f8b5680f94de/cc46e905-958e-4dc3-8f83-f8b5680f94de.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1154.116352] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65f9f6cf-eb52-42c2-b2b6-8b5c36a02dd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.122717] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1154.122717] env[70020]: value = "task-3619085" [ 1154.122717] env[70020]: _type = "Task" [ 1154.122717] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.130195] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619085, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.201811] env[70020]: DEBUG oslo_vmware.api [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619084, 'name': PowerOnVM_Task, 'duration_secs': 0.384093} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.202054] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1154.202264] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0c344fc5-c140-4759-b257-c67fc5ca2baa tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance 'ce4796b0-4ad2-4468-9898-aaedce6dcd32' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1154.206629] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.208923] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.712s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.238743] env[70020]: INFO nova.scheduler.client.report [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleted allocations for instance d0252c4e-0991-45b9-bf0b-b8e41093e518 [ 1154.247183] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4d64ff3a-6d8b-4447-879b-10a5db8d760f tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 11.755s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.335650] env[70020]: INFO nova.compute.manager [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Rescuing [ 1154.336032] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.336192] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.336360] env[70020]: DEBUG nova.network.neutron [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.439533] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.556500] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1154.576551] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c31a5b76-4d91-4f59-bb14-4164b337ab03 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.252s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.577774] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.138s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.632886] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619085, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464455} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.633160] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] cc46e905-958e-4dc3-8f83-f8b5680f94de/cc46e905-958e-4dc3-8f83-f8b5680f94de.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1154.633372] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.633622] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66ad564e-d09e-4d32-a883-48fbd45b2f6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.640315] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1154.640315] env[70020]: value = "task-3619086" [ 1154.640315] env[70020]: _type = "Task" [ 1154.640315] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.647857] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619086, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.747562] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3542fe8-02b3-42b0-942d-d92f55675403 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "d0252c4e-0991-45b9-bf0b-b8e41093e518" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.248s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.060123] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.060123] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.060123] env[70020]: DEBUG nova.network.neutron [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.060543] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "899183eb-ba25-491f-b981-77a33239ed74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.060745] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "899183eb-ba25-491f-b981-77a33239ed74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.060936] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "899183eb-ba25-491f-b981-77a33239ed74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.061124] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "899183eb-ba25-491f-b981-77a33239ed74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.061288] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "899183eb-ba25-491f-b981-77a33239ed74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.064455] env[70020]: INFO nova.compute.manager [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Terminating instance [ 1155.081015] env[70020]: INFO nova.compute.manager [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Detaching volume 68b7bed5-35e8-4ee8-81ea-460623d98a8b [ 1155.088202] env[70020]: DEBUG nova.network.neutron [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.092240] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.120464] env[70020]: INFO nova.virt.block_device [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Attempting to driver detach volume 68b7bed5-35e8-4ee8-81ea-460623d98a8b from mountpoint /dev/sdb [ 1155.120702] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 
tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1155.120891] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721817', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'name': 'volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f9d4837f-0e3f-4a83-9055-04d17ef3eb23', 'attached_at': '', 'detached_at': '', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'serial': '68b7bed5-35e8-4ee8-81ea-460623d98a8b'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1155.121775] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99065142-1cbb-4f3f-9f6e-e0396bb7cc62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.148293] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a9f868-7be2-4f71-9b03-c1da1dc0cd74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.157478] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff44ff06-1833-4e42-bc67-4ca351939899 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.159757] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063218} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.160013] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1155.160977] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c6f9c8-b120-4e10-83f3-d7d7b24adfca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.179280] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0cb98a-2679-4b74-a114-aa44f2242838 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.198683] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] cc46e905-958e-4dc3-8f83-f8b5680f94de/cc46e905-958e-4dc3-8f83-f8b5680f94de.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.199379] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de40722a-a8ba-4d6b-b82d-becaf81bd4bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.228681] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance c0a78ace-307e-4156-beb3-a53061acff7f as it has an incoming, in-progress migration 4b514c28-2530-40de-84ec-b4948bca618a. Migration status is post-migrating {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1155.228897] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance ce4796b0-4ad2-4468-9898-aaedce6dcd32 as it has an incoming, in-progress migration 4939402d-e13e-49ea-912f-3c8637ee0898. Migration status is finished {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1155.230341] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating resource usage from migration 4b514c28-2530-40de-84ec-b4948bca618a [ 1155.230668] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating resource usage from migration 4939402d-e13e-49ea-912f-3c8637ee0898 [ 1155.232753] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] The volume has not been displaced from its original location: [datastore2] volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b/volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1155.237834] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1155.238979] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3af2cc1-24b9-46ea-9382-c3e5337cc926 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.253961] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1155.253961] env[70020]: value = "task-3619087" [ 1155.253961] env[70020]: _type = "Task" [ 1155.253961] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.257952] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258106] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258231] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258347] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258460] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 8dbb1de0-38de-493f-9512-b8754bab7bcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258569] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9e7bd10b-3a78-48d8-9b66-e3646635be6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258686] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance f9d4837f-0e3f-4a83-9055-04d17ef3eb23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258815] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 899183eb-ba25-491f-b981-77a33239ed74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.258924] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9962b718-ca31-4f09-91f3-133dd68612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.259042] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration 4939402d-e13e-49ea-912f-3c8637ee0898 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1155.259152] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ce4796b0-4ad2-4468-9898-aaedce6dcd32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.259256] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration 4b514c28-2530-40de-84ec-b4948bca618a is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1155.259360] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c0a78ace-307e-4156-beb3-a53061acff7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.259462] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance cc46e905-958e-4dc3-8f83-f8b5680f94de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.261904] env[70020]: DEBUG oslo_vmware.api [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1155.261904] env[70020]: value = "task-3619088" [ 1155.261904] env[70020]: _type = "Task" [ 1155.261904] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.270893] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619087, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.275834] env[70020]: DEBUG oslo_vmware.api [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619088, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.573947] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "refresh_cache-899183eb-ba25-491f-b981-77a33239ed74" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.574204] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquired lock "refresh_cache-899183eb-ba25-491f-b981-77a33239ed74" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.574399] env[70020]: DEBUG nova.network.neutron [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.594476] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.761124] env[70020]: DEBUG nova.network.neutron [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance_info_cache with network_info: [{"id": 
"4332b789-1993-4df4-8099-15089bf507db", "address": "fa:16:3e:1a:0a:5b", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4332b789-19", "ovs_interfaceid": "4332b789-1993-4df4-8099-15089bf507db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.766562] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 85da90b5-c3cc-4e35-8c86-6aca07992a09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.766817] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1155.766980] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3328MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1155.770040] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619087, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.779712] env[70020]: DEBUG oslo_vmware.api [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619088, 'name': ReconfigVM_Task, 'duration_secs': 0.226789} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.780016] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1155.787662] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39116176-f7dc-4c5b-805a-5bde381ad691 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.804707] env[70020]: DEBUG oslo_vmware.api [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1155.804707] env[70020]: value = "task-3619089" [ 1155.804707] env[70020]: _type = "Task" [ 1155.804707] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.817205] env[70020]: DEBUG oslo_vmware.api [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619089, 'name': ReconfigVM_Task} progress is 10%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.991789] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4567a520-b2ac-47e9-9a8f-fe00ca7762d0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.999391] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dadb77-ef2d-41a0-84cf-e897e55ec150 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.034520] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e7b758-a91f-438b-a62d-a9c862635da6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.042307] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f25393-bcb8-4ace-89f9-58169440e687 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.058872] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.091985] env[70020]: DEBUG nova.network.neutron [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1156.149903] env[70020]: DEBUG nova.network.neutron [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.266372] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619087, 'name': ReconfigVM_Task, 'duration_secs': 0.974239} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.266707] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfigured VM instance instance-00000070 to attach disk [datastore2] cc46e905-958e-4dc3-8f83-f8b5680f94de/cc46e905-958e-4dc3-8f83-f8b5680f94de.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1156.267381] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a4b6647-8874-4c17-a146-38868b22de1c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.271746] env[70020]: DEBUG oslo_concurrency.lockutils [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.276696] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1156.276696] env[70020]: value = "task-3619090" [ 1156.276696] env[70020]: _type = "Task" [ 1156.276696] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.284896] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619090, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.314637] env[70020]: DEBUG oslo_vmware.api [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619089, 'name': ReconfigVM_Task, 'duration_secs': 0.160596} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.315123] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721817', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'name': 'volume-68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f9d4837f-0e3f-4a83-9055-04d17ef3eb23', 'attached_at': '', 'detached_at': '', 'volume_id': '68b7bed5-35e8-4ee8-81ea-460623d98a8b', 'serial': '68b7bed5-35e8-4ee8-81ea-460623d98a8b'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1156.454952] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.455242] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.455431] env[70020]: DEBUG nova.compute.manager [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Going to confirm migration 4 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1156.562193] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.651939] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Releasing lock "refresh_cache-899183eb-ba25-491f-b981-77a33239ed74" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.652371] env[70020]: DEBUG nova.compute.manager [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Start destroying the instance on 
the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.652562] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.653688] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7299fd6f-3512-4051-a3ce-99e4dfc2a1c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.661349] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.661595] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08199c5a-31ad-45bf-baa7-0c35ab7dbfd7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.667804] env[70020]: DEBUG oslo_vmware.api [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1156.667804] env[70020]: value = "task-3619091" [ 1156.667804] env[70020]: _type = "Task" [ 1156.667804] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.675470] env[70020]: DEBUG oslo_vmware.api [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.792211] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619090, 'name': Rename_Task, 'duration_secs': 0.194276} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.792517] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.792793] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d28b20e0-e22c-4854-84d4-84ddb9067f67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.797459] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccef5030-b8c2-40c4-bbd8-46cc3c2fa4c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.800892] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1156.800892] env[70020]: value = "task-3619092" [ 1156.800892] env[70020]: _type = "Task" [ 1156.800892] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.820323] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b0dd10-0c19-427d-be1f-cfae4be38783 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.826455] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619092, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.830958] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1156.864106] env[70020]: DEBUG nova.objects.instance [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'flavor' on Instance uuid f9d4837f-0e3f-4a83-9055-04d17ef3eb23 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.018178] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.018370] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.018542] env[70020]: DEBUG nova.network.neutron [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1157.018722] env[70020]: DEBUG nova.objects.instance [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'info_cache' on Instance uuid ce4796b0-4ad2-4468-9898-aaedce6dcd32 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.067358] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1157.067589] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.859s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.067859] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.976s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.069400] env[70020]: INFO nova.compute.claims [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed 
tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1157.071732] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.071864] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Cleaning up deleted instances {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1157.132326] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.132730] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a71d1fb-6389-4ea6-9b48-21606b801467 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.139738] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1157.139738] env[70020]: value = "task-3619093" [ 1157.139738] env[70020]: _type = "Task" [ 1157.139738] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.147777] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619093, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.179501] env[70020]: DEBUG oslo_vmware.api [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619091, 'name': PowerOffVM_Task, 'duration_secs': 0.218805} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.179747] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1157.179912] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1157.180197] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7045f5a7-52e2-47ae-aad3-0385d758cd02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.202730] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1157.202970] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1157.203193] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleting the datastore file [datastore1] 899183eb-ba25-491f-b981-77a33239ed74 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.203455] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17516cb5-1684-4b22-8bc8-646df63ddcab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.210166] env[70020]: DEBUG oslo_vmware.api [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for the task: (returnval){ [ 1157.210166] env[70020]: value = "task-3619095" [ 1157.210166] env[70020]: _type = "Task" [ 1157.210166] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.218133] env[70020]: DEBUG oslo_vmware.api [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.316262] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619092, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.338236] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.338604] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-595f654e-a53d-4cdf-bc47-7f32b0a88c4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.346940] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1157.346940] env[70020]: value = "task-3619096" [ 1157.346940] env[70020]: _type = "Task" [ 1157.346940] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.356627] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619096, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.589748] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] There are 56 instances to clean {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1157.589937] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 1e9d39d5-40fd-40b7-9421-94e0bff0314e] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.650260] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619093, 'name': PowerOffVM_Task, 'duration_secs': 0.212037} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.650532] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1157.651339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e09987a-cff7-470a-8a36-52e098942847 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.674990] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85efdcd7-fad5-4f59-bbc1-b53061dd169f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.707321] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.707624] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03dba402-258c-4ad0-bec7-9710a397b7fa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.714991] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1157.714991] env[70020]: value = "task-3619097" [ 1157.714991] env[70020]: _type = "Task" [ 1157.714991] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.721090] env[70020]: DEBUG oslo_vmware.api [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Task: {'id': task-3619095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170882} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.721542] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.721755] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.721975] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.722222] env[70020]: INFO nova.compute.manager [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1157.722401] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.722648] env[70020]: DEBUG nova.compute.manager [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.722769] env[70020]: DEBUG nova.network.neutron [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1157.727584] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1157.727773] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1157.728018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.728178] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.728352] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.728621] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63d86bc6-6071-475f-ba4f-191f59332512 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.737092] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.737271] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1157.737976] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38f27c2-1689-430a-b024-5ff296e96090 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.742133] env[70020]: DEBUG nova.network.neutron [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1157.744293] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1157.744293] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52dbc820-3404-8e55-292c-21cd2e046c46" [ 1157.744293] env[70020]: _type = "Task" [ 1157.744293] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.751715] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dbc820-3404-8e55-292c-21cd2e046c46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.811550] env[70020]: DEBUG oslo_vmware.api [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619092, 'name': PowerOnVM_Task, 'duration_secs': 0.855492} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.811859] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.811995] env[70020]: INFO nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Took 8.05 seconds to spawn the instance on the hypervisor. [ 1157.812253] env[70020]: DEBUG nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.812998] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63b11f1-9c3c-4090-95e3-6f75fdb9df02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.858635] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619096, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.871799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e4cbf92b-b1ef-4db2-8996-8c878fff89ea tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.294s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.095881] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d0252c4e-0991-45b9-bf0b-b8e41093e518] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.235594] env[70020]: DEBUG nova.network.neutron [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [{"id": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "address": "fa:16:3e:34:e6:98", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff971a6c-7f", "ovs_interfaceid": "ff971a6c-7fab-4c04-a75d-259986b9fce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.245548] env[70020]: DEBUG nova.network.neutron [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.260200] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52dbc820-3404-8e55-292c-21cd2e046c46, 'name': SearchDatastore_Task, 'duration_secs': 0.019243} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.264029] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5cb51c7-8240-4481-940f-687ff483ad9f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.269108] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1158.269108] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52322de3-0467-3d25-3fab-16e71891d41c" [ 1158.269108] env[70020]: _type = "Task" [ 1158.269108] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.279296] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52322de3-0467-3d25-3fab-16e71891d41c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.299072] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065bfa7b-0fcf-4464-ab18-99687d20ecbf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.305769] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83a8a75-40c5-4471-a481-2960332653be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.341571] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00321dc-af0f-4595-a5fa-c3feae145bf5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.344433] env[70020]: INFO nova.compute.manager [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Took 15.79 seconds to build instance. [ 1158.353468] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd18b5b-970b-4b76-84df-4bc89a7ca5f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.365019] env[70020]: DEBUG oslo_vmware.api [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619096, 'name': PowerOnVM_Task, 'duration_secs': 0.587642} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.370294] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.370469] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-be68a1d0-7c48-46f7-8e9a-82f0568ed806 tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance 'c0a78ace-307e-4156-beb3-a53061acff7f' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1158.374276] env[70020]: DEBUG nova.compute.provider_tree [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.598825] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: db24c4e0-f778-4488-b9cb-a06b21932b4e] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.738374] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ce4796b0-4ad2-4468-9898-aaedce6dcd32" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.738615] env[70020]: DEBUG nova.objects.instance [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'migration_context' on Instance uuid ce4796b0-4ad2-4468-9898-aaedce6dcd32 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.751786] env[70020]: INFO nova.compute.manager [-] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Took 1.03 seconds to deallocate network for instance. [ 1158.779848] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52322de3-0467-3d25-3fab-16e71891d41c, 'name': SearchDatastore_Task, 'duration_secs': 0.033322} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.780759] env[70020]: DEBUG oslo_concurrency.lockutils [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.780759] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. {{(pid=70020) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1158.780902] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9c5871d-02e1-42c4-9585-0619c760fa75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.788547] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1158.788547] env[70020]: value = "task-3619098" [ 1158.788547] env[70020]: _type = "Task" [ 1158.788547] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.797929] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619098, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.846634] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d9aa6f3b-1d28-474b-8fdb-c1f2b4979b1f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.305s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.879911] env[70020]: DEBUG nova.scheduler.client.report [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.959550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.959814] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.960045] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.960232] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.960405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.962784] env[70020]: INFO nova.compute.manager [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Terminating instance [ 1159.091275] env[70020]: DEBUG nova.compute.manager [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-changed-0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.091392] env[70020]: DEBUG nova.compute.manager [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing instance network info cache due to event network-changed-0c413503-8ce1-454a-a6b0-3fb75d647a04. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1159.091578] env[70020]: DEBUG oslo_concurrency.lockutils [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.091734] env[70020]: DEBUG oslo_concurrency.lockutils [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.091884] env[70020]: DEBUG nova.network.neutron [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing network info cache for port 0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.101729] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f1a09304-7725-489a-8669-322a51c709e5] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.241617] env[70020]: DEBUG nova.objects.base [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1159.242721] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3ef02a-6b3c-4c6e-94ec-853aad190cf0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.263703] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.264794] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-c5666393-f55d-4584-9159-a61fe6787606 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.270210] env[70020]: DEBUG oslo_vmware.api [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1159.270210] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526fcd1e-9763-78ac-47cd-ca9418eb1496" [ 1159.270210] env[70020]: _type = "Task" [ 1159.270210] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.277910] env[70020]: DEBUG oslo_vmware.api [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526fcd1e-9763-78ac-47cd-ca9418eb1496, 'name': SearchDatastore_Task} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.278219] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.297181] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475467} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.297546] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk. 
[ 1159.298338] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cb8302-227e-40d6-850b-40e4897a4dab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.326685] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.326948] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0b4c38e-e4a1-41d8-ab1a-45d37be2e563 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.344639] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1159.344639] env[70020]: value = "task-3619099" [ 1159.344639] env[70020]: _type = "Task" [ 1159.344639] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.355505] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619099, 'name': ReconfigVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.388452] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.388910] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1159.392018] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.128s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.392018] env[70020]: DEBUG nova.objects.instance [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lazy-loading 'resources' on Instance uuid 899183eb-ba25-491f-b981-77a33239ed74 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.468015] env[70020]: DEBUG nova.compute.manager [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1159.468015] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1159.468171] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4d8982-af5e-4c5b-8dbf-0c50bdca1410 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.476537] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.476785] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab13e432-faad-472a-8d2d-7e846a8eff1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.482562] env[70020]: DEBUG oslo_vmware.api [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1159.482562] env[70020]: value = "task-3619100" [ 1159.482562] env[70020]: _type = "Task" [ 1159.482562] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.491772] env[70020]: DEBUG oslo_vmware.api [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619100, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.605051] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 3163a070-a0db-4a41-af32-dfbe7a1766ac] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.806259] env[70020]: DEBUG nova.network.neutron [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updated VIF entry in instance network info cache for port 0c413503-8ce1-454a-a6b0-3fb75d647a04. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.806627] env[70020]: DEBUG nova.network.neutron [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.855330] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619099, 'name': ReconfigVM_Task, 'duration_secs': 0.353791} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.855717] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d/c9cd83bf-fd12-4173-a067-f57d38f23556-rescue.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.856529] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b2cb8b-6be6-4343-b7c4-3c0c126f7fb2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.883913] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77625bf8-4257-49b4-8463-b3a173adeb2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.902178] env[70020]: DEBUG nova.compute.utils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1159.904602] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1159.904766] env[70020]: DEBUG nova.network.neutron [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1159.912468] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1159.912468] env[70020]: value = "task-3619101" [ 1159.912468] env[70020]: _type = "Task" [ 1159.912468] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.922873] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619101, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.945341] env[70020]: DEBUG nova.policy [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '025d293d3c0449e1b36a7aa465ad1110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd3733a000724aab9255cb498cecdfba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1159.996753] env[70020]: DEBUG oslo_vmware.api [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619100, 'name': PowerOffVM_Task, 'duration_secs': 0.226399} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.997353] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.997353] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1159.997560] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8777ed6c-6d09-453d-a6a8-48e22a9d6a68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.062033] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1160.062427] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1160.062575] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleting the datastore file [datastore1] f9d4837f-0e3f-4a83-9055-04d17ef3eb23 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1160.062903] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed292aa2-c282-4351-98ee-6150dde57b4e {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.069516] env[70020]: DEBUG oslo_vmware.api [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1160.069516] env[70020]: value = "task-3619103" [ 1160.069516] env[70020]: _type = "Task" [ 1160.069516] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.080265] env[70020]: DEBUG oslo_vmware.api [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.105994] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22f3273-9677-4108-ad98-a6abd3049ffe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.109360] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: e5c6ad2e-9925-4234-a7da-ea2618b7c7d5] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.117034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0970e4-a83f-433e-8639-bda65b3f3a33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.153001] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217e3474-0094-4cfe-aed8-b686bfd673fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.161985] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77a353b-6f58-48c0-b010-270fdb1206c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.177247] env[70020]: DEBUG nova.compute.provider_tree [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.241029] env[70020]: DEBUG nova.network.neutron [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Successfully created port: fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1160.309245] env[70020]: DEBUG oslo_concurrency.lockutils [req-9d0b95a2-33ad-4add-b6a0-90e7012aa969 req-76501ecb-0a4e-4e50-8394-d66a1ab0964f service nova] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.407824] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1160.423533] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619101, 'name': ReconfigVM_Task, 'duration_secs': 0.1861} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.423950] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1160.424286] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a08a59e-0b30-46ba-9e30-5107e61c09e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.431161] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1160.431161] env[70020]: value = "task-3619104" [ 1160.431161] env[70020]: _type = "Task" [ 1160.431161] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.439444] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.583442] env[70020]: DEBUG oslo_vmware.api [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201065} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.583709] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.583890] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.584078] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.584255] env[70020]: INFO nova.compute.manager [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1160.584497] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.584689] env[70020]: DEBUG nova.compute.manager [-] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1160.584785] env[70020]: DEBUG nova.network.neutron [-] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1160.613422] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 9d1568bf-4027-4d4c-b089-276006eee715] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.680372] env[70020]: DEBUG nova.scheduler.client.report [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.942169] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619104, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.117696] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c972e083-8c91-4875-a8c6-8257b06c93a1] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.121081] env[70020]: DEBUG nova.compute.manager [req-7091b99d-b77b-4860-b5a7-8b29503eb2f0 req-a80541f0-3a98-482f-9091-6a7d2f4503d6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Received event network-vif-deleted-8cef7e32-bdf1-41df-be67-ab80c6f894de {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.121271] env[70020]: INFO nova.compute.manager [req-7091b99d-b77b-4860-b5a7-8b29503eb2f0 req-a80541f0-3a98-482f-9091-6a7d2f4503d6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Neutron deleted interface 8cef7e32-bdf1-41df-be67-ab80c6f894de; detaching it from the instance and deleting it from the info cache [ 1161.121440] env[70020]: DEBUG nova.network.neutron [req-7091b99d-b77b-4860-b5a7-8b29503eb2f0 req-a80541f0-3a98-482f-9091-6a7d2f4503d6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.168430] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c0a78ace-307e-4156-beb3-a53061acff7f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.168687] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.168946] env[70020]: DEBUG nova.compute.manager [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Going to confirm migration 5 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1161.186512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.189705] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.911s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.217916] 
env[70020]: INFO nova.scheduler.client.report [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Deleted allocations for instance 899183eb-ba25-491f-b981-77a33239ed74 [ 1161.417033] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1161.445115] env[70020]: DEBUG oslo_vmware.api [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619104, 'name': PowerOnVM_Task, 'duration_secs': 0.844725} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.447351] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1161.447581] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1161.447734] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1161.447915] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1161.448086] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1161.448247] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1161.448451] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1161.448605] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1161.448767] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1161.448926] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1161.449121] env[70020]: DEBUG nova.virt.hardware [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1161.449410] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.451489] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bf5bbc-463a-4744-8a59-25f699048e7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.454983] env[70020]: DEBUG nova.compute.manager [None req-4802c350-1a48-4ce4-8931-57a6463aac4b tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.456018] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdf6db8-3d71-4aff-8e97-c192799ef52e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.467000] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54787cef-50ee-4fa0-9cc1-b182562ce2f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.532128] env[70020]: DEBUG nova.network.neutron [-] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1161.624526] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 97fe6c57-03de-4cf8-a990-ff4f88db6cd7] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.627029] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56539547-fb24-45b4-b9f3-1275e0822598 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.639629] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904d2e1d-426f-47ad-9c0c-80cc4fa82792 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.677225] env[70020]: DEBUG nova.compute.manager [req-7091b99d-b77b-4860-b5a7-8b29503eb2f0 req-a80541f0-3a98-482f-9091-6a7d2f4503d6 service nova] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Detach interface failed, port_id=8cef7e32-bdf1-41df-be67-ab80c6f894de, reason: Instance f9d4837f-0e3f-4a83-9055-04d17ef3eb23 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1161.706681] env[70020]: DEBUG nova.network.neutron [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Successfully updated port: fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1161.725406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d3f56d05-f4e8-408c-9207-a50dce77b3f5 tempest-ServerShowV247Test-567239629 tempest-ServerShowV247Test-567239629-project-member] Lock "899183eb-ba25-491f-b981-77a33239ed74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.664s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.743932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.744141] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquired lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.744316] env[70020]: DEBUG nova.network.neutron [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.744499] env[70020]: DEBUG nova.objects.instance [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'info_cache' on Instance uuid c0a78ace-307e-4156-beb3-a53061acff7f 
{{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.886981] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac39e8e-e627-4c06-92ea-e071c975c76c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.895318] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bb85a6-0fda-4bb7-8716-d64eff6951d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.926301] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ef775e-5c39-4b94-9a33-71b5051ebcba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.933623] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7b4eba-798b-4427-93b3-0db801a6cbcb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.947082] env[70020]: DEBUG nova.compute.provider_tree [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1162.034953] env[70020]: INFO nova.compute.manager [-] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Took 1.45 seconds to deallocate network for instance. 
[ 1162.127685] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 1ddd5a29-075b-482a-a6e9-4c7345673a00] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.209718] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "refresh_cache-85da90b5-c3cc-4e35-8c86-6aca07992a09" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.209904] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "refresh_cache-85da90b5-c3cc-4e35-8c86-6aca07992a09" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.210093] env[70020]: DEBUG nova.network.neutron [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.467825] env[70020]: ERROR nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [req-1417e380-b78d-44ef-8480-9addb8e5042b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1417e380-b78d-44ef-8480-9addb8e5042b"}]} [ 1162.483746] env[70020]: DEBUG nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1162.497260] env[70020]: DEBUG nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1162.497475] env[70020]: DEBUG nova.compute.provider_tree [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1162.507972] env[70020]: DEBUG nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1162.525219] env[70020]: DEBUG nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1162.527625] env[70020]: INFO nova.compute.manager [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Unrescuing [ 1162.527921] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 
tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.528169] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.528416] env[70020]: DEBUG nova.network.neutron [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.540545] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.630844] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 04de1a07-cf38-41e0-be96-237bbe1ead83] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.696147] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96867cd-b242-4e0a-b8e7-119d2b831441 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.704455] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dd1d91-a711-4d90-aeee-5054e8596a01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.738571] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fb4424-8d7b-4948-b711-8bd8b0867a13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.746432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b56738-fe87-46e6-ac78-3f096727c107 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.762810] env[70020]: DEBUG nova.compute.provider_tree [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1162.789580] env[70020]: DEBUG nova.network.neutron [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1163.014306] env[70020]: DEBUG nova.network.neutron [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Updating instance_info_cache with network_info: [{"id": "fdf42d56-b2c8-46af-be73-f38818a96a46", "address": "fa:16:3e:e6:ad:5b", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf42d56-b2", "ovs_interfaceid": "fdf42d56-b2c8-46af-be73-f38818a96a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.060877] env[70020]: DEBUG nova.network.neutron [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance_info_cache with network_info: [{"id": "4332b789-1993-4df4-8099-15089bf507db", "address": "fa:16:3e:1a:0a:5b", "network": {"id": "83a3ee04-e0ff-40e7-ae51-c463add43abb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2003290189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfa7d3b1f5a14c60b19cde5030c2f0a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4332b789-19", "ovs_interfaceid": "4332b789-1993-4df4-8099-15089bf507db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.133684] env[70020]: DEBUG nova.compute.manager [None 
req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 0453722d-258f-49e3-b61e-f1081eb465c6] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.242914] env[70020]: DEBUG nova.compute.manager [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Received event network-vif-plugged-fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.243568] env[70020]: DEBUG oslo_concurrency.lockutils [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] Acquiring lock "85da90b5-c3cc-4e35-8c86-6aca07992a09-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.243905] env[70020]: DEBUG oslo_concurrency.lockutils [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.243978] env[70020]: DEBUG oslo_concurrency.lockutils [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.244273] env[70020]: DEBUG nova.compute.manager [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] No waiting events found dispatching network-vif-plugged-fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1163.244437] env[70020]: WARNING nova.compute.manager [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Received unexpected event network-vif-plugged-fdf42d56-b2c8-46af-be73-f38818a96a46 for instance with vm_state building and task_state spawning. [ 1163.244504] env[70020]: DEBUG nova.compute.manager [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Received event network-changed-fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.244649] env[70020]: DEBUG nova.compute.manager [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Refreshing instance network info cache due to event network-changed-fdf42d56-b2c8-46af-be73-f38818a96a46. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1163.244817] env[70020]: DEBUG oslo_concurrency.lockutils [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] Acquiring lock "refresh_cache-85da90b5-c3cc-4e35-8c86-6aca07992a09" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.285031] env[70020]: DEBUG nova.network.neutron [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.300185] env[70020]: DEBUG nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1163.300458] env[70020]: DEBUG nova.compute.provider_tree [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 160 to 161 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1163.300644] env[70020]: DEBUG nova.compute.provider_tree [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for 
provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1163.517594] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "refresh_cache-85da90b5-c3cc-4e35-8c86-6aca07992a09" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.517923] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Instance network_info: |[{"id": "fdf42d56-b2c8-46af-be73-f38818a96a46", "address": "fa:16:3e:e6:ad:5b", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf42d56-b2", "ovs_interfaceid": "fdf42d56-b2c8-46af-be73-f38818a96a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1163.518352] env[70020]: DEBUG oslo_concurrency.lockutils [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] Acquired lock "refresh_cache-85da90b5-c3cc-4e35-8c86-6aca07992a09" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.518692] env[70020]: DEBUG nova.network.neutron [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Refreshing network info cache for port fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1163.520495] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:ad:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'fdf42d56-b2c8-46af-be73-f38818a96a46', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1163.528816] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1163.532337] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1163.532903] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fe6b78f-4612-4bfc-9286-776130127080 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.553409] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1163.553409] env[70020]: value = "task-3619105" [ 1163.553409] env[70020]: _type = "Task" [ 1163.553409] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.566171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Releasing lock "refresh_cache-c0a78ace-307e-4156-beb3-a53061acff7f" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.566514] env[70020]: DEBUG nova.objects.instance [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lazy-loading 'migration_context' on Instance uuid c0a78ace-307e-4156-beb3-a53061acff7f {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.567642] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619105, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.640182] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 056141e3-5628-4451-bd25-f4fa15edd11e] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.787710] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.788347] env[70020]: DEBUG nova.objects.instance [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'flavor' on Instance uuid 9e7bd10b-3a78-48d8-9b66-e3646635be6d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.823456] env[70020]: DEBUG nova.network.neutron [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Updated VIF entry in instance network info cache for port fdf42d56-b2c8-46af-be73-f38818a96a46. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1163.823820] env[70020]: DEBUG nova.network.neutron [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Updating instance_info_cache with network_info: [{"id": "fdf42d56-b2c8-46af-be73-f38818a96a46", "address": "fa:16:3e:e6:ad:5b", "network": {"id": "bff860aa-ea09-4260-84e8-d9ffd2c5bf4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1893681432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd3733a000724aab9255cb498cecdfba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf42d56-b2", "ovs_interfaceid": "fdf42d56-b2c8-46af-be73-f38818a96a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.063497] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619105, 'name': CreateVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.070589] env[70020]: DEBUG nova.objects.base [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1164.071651] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e2138c-fd86-472a-a4c3-4c3334746f40 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.090341] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a26f8dc4-2747-4308-8b23-b54ec315af9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.096095] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1164.096095] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]528188b7-64cf-4f53-82ed-8c2814ab1c10" [ 1164.096095] env[70020]: _type = "Task" [ 1164.096095] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.103865] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528188b7-64cf-4f53-82ed-8c2814ab1c10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.143557] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d8e9fdc1-5c5b-4d2b-b4c0-f9a1ff2a6c52] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.293533] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2c0064-58c5-46d2-afa6-e7cd73df1ab1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.319102] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.129s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.321861] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.322413] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.782s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.322628] env[70020]: DEBUG nova.objects.instance [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'resources' on Instance uuid f9d4837f-0e3f-4a83-9055-04d17ef3eb23 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.323402] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c0531cb-ae9c-4dd4-a1b3-07c33e91478f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.325906] env[70020]: DEBUG oslo_concurrency.lockutils [req-bc4ac091-9966-4cc4-8788-b7ca9631aa5c req-e193c9e5-afba-49c6-ac16-a36f85263bb7 service nova] Releasing lock "refresh_cache-85da90b5-c3cc-4e35-8c86-6aca07992a09" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.331642] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1164.331642] env[70020]: value = "task-3619106" [ 1164.331642] env[70020]: _type = "Task" [ 1164.331642] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.340452] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.564196] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619105, 'name': CreateVM_Task, 'duration_secs': 0.534023} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.564388] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1164.565084] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.565303] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.565672] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1164.565986] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d807d16-b502-41a8-8e72-64882d98d4fd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.570382] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1164.570382] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bb6d1b-eef7-08af-df17-99a2f871a866" [ 1164.570382] env[70020]: _type = "Task" [ 1164.570382] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.579680] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bb6d1b-eef7-08af-df17-99a2f871a866, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.605312] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]528188b7-64cf-4f53-82ed-8c2814ab1c10, 'name': SearchDatastore_Task, 'duration_secs': 0.023179} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.605708] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.647023] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 3dedfa48-0839-462e-8c32-ba5252f07ac0] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.842274] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619106, 'name': PowerOffVM_Task, 'duration_secs': 0.226199} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.844736] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.850018] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfiguring VM instance instance-00000068 to detach disk 2002 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1164.852580] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6e2cc4c-9002-439e-a418-8579d1629866 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.872323] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1164.872323] env[70020]: value = "task-3619107" [ 1164.872323] env[70020]: _type = "Task" [ 1164.872323] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.883939] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619107, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.885158] env[70020]: INFO nova.scheduler.client.report [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocation for migration 4939402d-e13e-49ea-912f-3c8637ee0898 [ 1165.022051] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e445661-bba5-487c-80fd-ab94bbb5fbde {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.029606] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba4c2f-4de8-46a0-bdc1-a192936ca896 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.060076] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655b5922-e0e4-4746-942d-4b5486cd6bac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.068109] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69a0eac-2de3-4d31-8656-8567b84dcd54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.088897] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bb6d1b-eef7-08af-df17-99a2f871a866, 'name': SearchDatastore_Task, 'duration_secs': 0.012638} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.089387] env[70020]: DEBUG nova.compute.provider_tree [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.090777] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.091014] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1165.091251] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.091396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.091570] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1165.092038] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5960f64e-d698-43f1-b47e-b3dea1be50c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.100797] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1165.100972] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1165.101910] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f95e5124-c7e0-4b10-b411-d58830cebcd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.108283] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1165.108283] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]527f3002-fc2f-b761-e249-144ee3c3c58c" [ 1165.108283] env[70020]: _type = "Task" [ 1165.108283] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.115883] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527f3002-fc2f-b761-e249-144ee3c3c58c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.150029] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 42d20396-883d-4141-a226-61f476057cbe] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.386625] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619107, 'name': ReconfigVM_Task, 'duration_secs': 0.247484} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.387084] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfigured VM instance instance-00000068 to detach disk 2002 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1165.387396] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1165.387744] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dddf1bf-5866-4280-bd03-aeb2d56c497b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.392806] env[70020]: DEBUG oslo_concurrency.lockutils [None req-75ab2ae2-1f20-4ab7-9850-707c65fc0424 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.937s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.397386] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1165.397386] env[70020]: value = "task-3619108" [ 1165.397386] env[70020]: _type = "Task" [ 1165.397386] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.415722] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619108, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.593965] env[70020]: DEBUG nova.scheduler.client.report [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.618942] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]527f3002-fc2f-b761-e249-144ee3c3c58c, 'name': SearchDatastore_Task, 'duration_secs': 0.008864} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.619717] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0d40a50-c247-43ae-8dfa-e25afb81024b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.624886] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1165.624886] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fe40c5-ab1a-4620-7b2c-164bcf1884b3" [ 1165.624886] env[70020]: _type = "Task" [ 1165.624886] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.632807] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fe40c5-ab1a-4620-7b2c-164bcf1884b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.654390] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 40fa0339-c221-4841-9444-dc957a95cf3b] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.910548] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619108, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.099161] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.101415] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.496s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.122116] env[70020]: INFO nova.scheduler.client.report [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted allocations for instance f9d4837f-0e3f-4a83-9055-04d17ef3eb23 [ 1166.135939] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fe40c5-ab1a-4620-7b2c-164bcf1884b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.136225] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.136473] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 85da90b5-c3cc-4e35-8c86-6aca07992a09/85da90b5-c3cc-4e35-8c86-6aca07992a09.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1166.136729] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d4096ac-3973-413a-bc2e-da6f71d41920 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.143911] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1166.143911] env[70020]: value = "task-3619109" [ 1166.143911] env[70020]: _type = "Task" [ 1166.143911] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.154296] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619109, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.157810] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 58dded95-033a-46d7-b02e-5b2f2551234c] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.411228] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619108, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.521536] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.521536] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.521875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.521944] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.523490] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.524981] env[70020]: INFO nova.compute.manager [None req-d369a7f0-3425-470f-97e8-9438ab70027c 
tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Terminating instance [ 1166.633992] env[70020]: DEBUG oslo_concurrency.lockutils [None req-57520b71-311e-44cd-8fdb-c83729d6aa46 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "f9d4837f-0e3f-4a83-9055-04d17ef3eb23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.674s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.658671] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619109, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.663800] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: a39731d2-0b9b-41fa-b9ac-f80193a26d20] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.811989] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0325de9-624b-42dd-88b1-1b625ef6c08c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.818919] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bee554-26ab-4eda-b01d-76c0a9017ec5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.849795] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3cc577-9c7b-4cba-9057-3420aa61755e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.857850] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36d5184-74a0-4f9c-8a32-4115f8200f6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.872676] env[70020]: DEBUG nova.compute.provider_tree [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.908802] env[70020]: DEBUG oslo_vmware.api [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619108, 'name': PowerOnVM_Task, 'duration_secs': 1.031143} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.909198] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.909303] env[70020]: DEBUG nova.compute.manager [None req-3594b474-9327-4ba6-829d-615d1eb6da63 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.910023] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60fe01d-a0c2-4d69-bc44-6c46ef4f8e7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.970816] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.971066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.029734] env[70020]: DEBUG nova.compute.manager [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1167.030052] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.030929] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c66723a-c76a-4df2-922f-42f1c6398b98 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.038973] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.039233] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39dd168a-1c53-445b-9aa9-5a6d7a6e37c2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.044979] env[70020]: DEBUG oslo_vmware.api [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1167.044979] env[70020]: value = "task-3619110" [ 1167.044979] env[70020]: _type = "Task" [ 1167.044979] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.052363] env[70020]: DEBUG oslo_vmware.api [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.155104] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619109, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.167745] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 563512c2-b80f-4f14-add5-d48e2b7a0ee9] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.378021] env[70020]: DEBUG nova.scheduler.client.report [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.475768] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1167.554452] env[70020]: DEBUG oslo_vmware.api [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619110, 'name': PowerOffVM_Task, 'duration_secs': 0.331686} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.554834] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.555037] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.555295] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62a8f208-d563-49e2-88c4-7140f99936a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.614275] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.614552] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.614704] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file [datastore1] ce4796b0-4ad2-4468-9898-aaedce6dcd32 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.615012] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90e91ccd-2489-42f1-9a39-18aaf7536e0c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.621273] env[70020]: DEBUG oslo_vmware.api [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1167.621273] env[70020]: value = "task-3619112" [ 1167.621273] env[70020]: _type = "Task" [ 1167.621273] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.629266] env[70020]: DEBUG oslo_vmware.api [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619112, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.655481] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619109, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.671180] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: da07cb36-244f-4f48-a5b6-8d00324c1edf] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.997384] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.131753] env[70020]: DEBUG oslo_vmware.api [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260824} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.132020] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.132424] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.132581] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.132775] env[70020]: INFO nova.compute.manager [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1168.133016] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1168.133226] env[70020]: DEBUG nova.compute.manager [-] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1168.133321] env[70020]: DEBUG nova.network.neutron [-] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1168.155691] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619109, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.692981} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.156107] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 85da90b5-c3cc-4e35-8c86-6aca07992a09/85da90b5-c3cc-4e35-8c86-6aca07992a09.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1168.156208] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1168.156422] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1017a9cc-a520-4c9c-923c-6ba0e12e5e13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.163089] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1168.163089] env[70020]: value = "task-3619113" [ 1168.163089] env[70020]: _type = "Task" [ 1168.163089] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.171480] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619113, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.175280] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d45966fe-98ff-4466-8e7e-90550034742f] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.387831] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.286s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.391454] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.394s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.393091] env[70020]: INFO nova.compute.claims [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1168.404803] env[70020]: DEBUG nova.compute.manager [req-ae34685f-4d75-4064-abdf-87b4fae3aec3 req-adee7aec-d10a-4937-8374-16ced38ebff5 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Received event network-vif-deleted-ff971a6c-7fab-4c04-a75d-259986b9fce0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.405098] env[70020]: INFO nova.compute.manager [req-ae34685f-4d75-4064-abdf-87b4fae3aec3 req-adee7aec-d10a-4937-8374-16ced38ebff5 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Neutron deleted interface ff971a6c-7fab-4c04-a75d-259986b9fce0; detaching it from the instance and deleting it from the info cache [ 1168.405195] env[70020]: DEBUG nova.network.neutron [req-ae34685f-4d75-4064-abdf-87b4fae3aec3 req-adee7aec-d10a-4937-8374-16ced38ebff5 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.620225] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.620376] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.672841] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.272932} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.673141] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1168.673944] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416308ec-c50a-47c8-ae5d-dd0a03395f3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.677890] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 2ccd34c8-b433-41be-b800-d06a0595bff9] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.699402] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 85da90b5-c3cc-4e35-8c86-6aca07992a09/85da90b5-c3cc-4e35-8c86-6aca07992a09.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1168.699402] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f26ef06-4311-4cf5-be6b-4e5de3fc9283 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.718730] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1168.718730] env[70020]: value = "task-3619114" [ 1168.718730] env[70020]: _type = "Task" [ 1168.718730] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.727710] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619114, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.886079] env[70020]: DEBUG nova.network.neutron [-] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.904760] env[70020]: DEBUG nova.compute.manager [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.904963] env[70020]: DEBUG nova.compute.manager [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing instance network info cache due to event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1168.905210] env[70020]: DEBUG oslo_concurrency.lockutils [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.905437] env[70020]: DEBUG oslo_concurrency.lockutils [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.905502] env[70020]: DEBUG nova.network.neutron [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.907382] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a82bddd5-c893-4985-847b-c260e83da0c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.926765] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8d6ef7-6dda-416b-8240-af648cd7c2a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.972991] env[70020]: DEBUG nova.compute.manager [req-ae34685f-4d75-4064-abdf-87b4fae3aec3 req-adee7aec-d10a-4937-8374-16ced38ebff5 service nova] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Detach interface failed, port_id=ff971a6c-7fab-4c04-a75d-259986b9fce0, reason: Instance ce4796b0-4ad2-4468-9898-aaedce6dcd32 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1168.974286] env[70020]: INFO nova.scheduler.client.report [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocation for migration 4b514c28-2530-40de-84ec-b4948bca618a [ 1169.122898] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1169.200328] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: abc194e3-fb6a-4f2a-8886-e2777530a2a3] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.229421] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619114, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.390044] env[70020]: INFO nova.compute.manager [-] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Took 1.26 seconds to deallocate network for instance. [ 1169.479391] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.311s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.561123] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198ddb9f-6fbc-4287-8e3d-74016152dfe0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.568882] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2550a3-c9a1-451e-9cce-27798f290288 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.601914] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a5e017-ad0f-4599-8123-e2a72c35d176 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.609309] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab6a6fd-95f0-4031-874a-d7fd1e59e117 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.622373] env[70020]: DEBUG nova.compute.provider_tree [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1169.634415] env[70020]: DEBUG nova.network.neutron [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updated VIF entry in instance network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.634750] env[70020]: DEBUG nova.network.neutron [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.642027] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.704124] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 9dec24d6-af8a-41b9-920c-e4420fc69417] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.728572] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619114, 'name': ReconfigVM_Task, 'duration_secs': 0.689265} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.729378] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 85da90b5-c3cc-4e35-8c86-6aca07992a09/85da90b5-c3cc-4e35-8c86-6aca07992a09.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1169.729985] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9153ada7-e087-4b07-88e5-1fab0f963c4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.736609] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1169.736609] env[70020]: value = "task-3619115" [ 1169.736609] env[70020]: _type = "Task" [ 1169.736609] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.744249] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619115, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.896969] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.137033] env[70020]: DEBUG oslo_concurrency.lockutils [req-5653b139-8923-436e-a85d-e43ac72f15ff req-b63d9cc7-b733-4f3c-80a8-863de4b79e65 service nova] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.142995] env[70020]: ERROR nova.scheduler.client.report [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [req-3d092114-1f1e-4e67-9c1c-a67ab932dbb1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3d092114-1f1e-4e67-9c1c-a67ab932dbb1"}]} [ 1170.158246] env[70020]: DEBUG nova.scheduler.client.report [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1170.171605] env[70020]: DEBUG nova.scheduler.client.report [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1170.171812] env[70020]: DEBUG nova.compute.provider_tree [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.182023] env[70020]: DEBUG nova.scheduler.client.report [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1170.198945] env[70020]: DEBUG nova.scheduler.client.report [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1170.207624] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 3a4f2342-58e7-436b-a779-0fa093b52409] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.249512] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 
tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619115, 'name': Rename_Task, 'duration_secs': 0.172965} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.249812] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1170.250081] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc5699d9-a10b-411b-b998-5ec821a47765 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.256611] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1170.256611] env[70020]: value = "task-3619116" [ 1170.256611] env[70020]: _type = "Task" [ 1170.256611] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.265016] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.363967] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceefe6fa-5a03-4a36-822b-e36b05aa346b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.373052] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e789f5-584f-4d08-93ab-625efdf1d81c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.401803] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e367c52-7c90-40df-bc97-b96bc0d6dc1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.409587] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd8c20a-51d8-4f31-83e8-c9a0e3a9175a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.423543] env[70020]: DEBUG nova.compute.provider_tree [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.693951] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c0a78ace-307e-4156-beb3-a53061acff7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.694236] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.694446] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.694627] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.694793] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.697446] env[70020]: INFO nova.compute.manager [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Terminating instance [ 1170.710450] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ff4e958d-0068-429f-af76-5e7d4dd147f3] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.769886] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619116, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.932557] env[70020]: DEBUG nova.compute.manager [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.932680] env[70020]: DEBUG nova.compute.manager [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing instance network info cache due to event network-changed-2573d470-4c75-40c7-9e9b-6130f5e14092. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1170.932895] env[70020]: DEBUG oslo_concurrency.lockutils [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] Acquiring lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.933047] env[70020]: DEBUG oslo_concurrency.lockutils [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] Acquired lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.933208] env[70020]: DEBUG nova.network.neutron [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Refreshing network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1170.958712] env[70020]: DEBUG nova.scheduler.client.report [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 165 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1170.959021] env[70020]: DEBUG nova.compute.provider_tree [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 165 to 166 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1170.959169] env[70020]: DEBUG nova.compute.provider_tree [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1171.201736] env[70020]: DEBUG nova.compute.manager [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1171.201736] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.202763] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a48bb40-671a-4fbc-a7a0-e682143c2783 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.210628] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.210966] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21c8eaf9-003b-478f-a2a5-07702c5f2157 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.215030] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: edef9245-4048-4ea4-90cc-ebed54498d88] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.218967] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1171.218967] env[70020]: value = "task-3619117" [ 1171.218967] env[70020]: _type = "Task" [ 1171.218967] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.228862] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619117, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.271986] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619116, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.464351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.073s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.464938] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1171.470074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.828s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.471623] env[70020]: INFO nova.compute.claims [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1171.677400] env[70020]: DEBUG nova.network.neutron [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updated VIF entry in instance network info cache for port 2573d470-4c75-40c7-9e9b-6130f5e14092. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.677757] env[70020]: DEBUG nova.network.neutron [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [{"id": "2573d470-4c75-40c7-9e9b-6130f5e14092", "address": "fa:16:3e:bc:56:02", "network": {"id": "5481228b-9a6b-468e-bca7-0123c469ae56", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-340609977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7aae0b70f9d465ebcb9defe385fa434", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2573d470-4c", "ovs_interfaceid": "2573d470-4c75-40c7-9e9b-6130f5e14092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.720018] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 7e2c2cef-c778-44b1-8c0d-9cd5cf4916b6] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.730868] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619117, 'name': PowerOffVM_Task, 'duration_secs': 0.211803} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.730868] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.730868] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.731172] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b1b70f2-3cc1-4f90-a332-e56cfeff461f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.767248] env[70020]: DEBUG oslo_vmware.api [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619116, 'name': PowerOnVM_Task, 'duration_secs': 1.044031} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.767526] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1171.767701] env[70020]: INFO nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Took 10.35 seconds to spawn the instance on the hypervisor. 
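The task-3619116 lines above show the standard oslo.vmware polling pattern: the driver invokes VirtualMachine.PowerOnVM_Task, then wait_for_task blocks while _poll_task reports progress (0% -> 66% -> 100%) until the task completes and its duration is logged. Below is a minimal, illustrative sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession ("session") and a resolved VirtualMachine managed-object reference ("vm_ref"); the helper name power_on_and_wait is hypothetical and not part of Nova or oslo.vmware.

from oslo_vmware import exceptions as vexc


def power_on_and_wait(session, vm_ref):
    """Sketch only: start PowerOnVM_Task and block until it finishes.

    `session` is assumed to be an oslo_vmware.api.VMwareAPISession and
    `vm_ref` a VirtualMachine managed-object reference; both are created
    elsewhere (compare the session setup near the start of this log).
    """
    # Kick off the asynchronous vCenter task, like the
    # "Invoking VirtualMachine.PowerOnVM_Task" entry above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task polls the task server-side state (the
    # "progress is 0%/66%/100%" entries) and raises if the task fails;
    # on success it returns the final task info.
    try:
        task_info = session.wait_for_task(task)
    except vexc.VimFaultException:
        # A real caller would translate this into an instance fault;
        # re-raising keeps the sketch short.
        raise
    return task_info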
[ 1171.767878] env[70020]: DEBUG nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1171.768680] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a039d23b-637e-4613-9ba3-03221eafd282 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.795559] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.795772] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.795973] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleting the datastore file [datastore1] c0a78ace-307e-4156-beb3-a53061acff7f {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.796246] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d741234a-691a-4edd-b357-f05e4a156000 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.802012] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for the task: (returnval){ [ 1171.802012] env[70020]: value = "task-3619119" [ 1171.802012] env[70020]: _type = "Task" [ 1171.802012] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.809980] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619119, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.976767] env[70020]: DEBUG nova.compute.utils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1171.980037] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1171.980037] env[70020]: DEBUG nova.network.neutron [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1172.016320] env[70020]: DEBUG nova.policy [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1374458c1943470eba7e774715ba1ca9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3f6d704dd464768953c41d34d34d944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1172.180997] env[70020]: DEBUG oslo_concurrency.lockutils [req-20516b34-dc1e-43e8-9c7f-6df02d445113 req-9486bc1a-c9b5-4b0f-867a-6342bb116a11 service nova] Releasing lock "refresh_cache-9e7bd10b-3a78-48d8-9b66-e3646635be6d" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.225853] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 45926a02-d0fe-4274-ba47-b97b3e12e4cd] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1172.287605] env[70020]: INFO nova.compute.manager [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Took 17.22 seconds to build instance. [ 1172.314350] env[70020]: DEBUG oslo_vmware.api [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Task: {'id': task-3619119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126885} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.314350] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.314350] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.314585] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.314847] env[70020]: INFO nova.compute.manager [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1172.315144] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1172.315462] env[70020]: DEBUG nova.compute.manager [-] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1172.315588] env[70020]: DEBUG nova.network.neutron [-] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1172.329760] env[70020]: DEBUG nova.network.neutron [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Successfully created port: 2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1172.481601] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1172.708743] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f22656-3ccc-4921-af95-6a323fb233fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.717029] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8abbba-d7ef-41fe-a656-c8270192ce83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.746235] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 8317f386-44d0-4b1b-8590-d0336fafac21] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1172.748791] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e58959-80f1-4d7f-ac0d-a27ea6a3103f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.756522] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3da05e4-c9b6-42e7-95ef-66dd45184606 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.771131] env[70020]: DEBUG nova.compute.provider_tree [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1172.792243] env[70020]: DEBUG oslo_concurrency.lockutils [None req-33faa09d-90c7-4b4c-bc0b-ac9c8d2406ed tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.738s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.819980] env[70020]: DEBUG nova.compute.manager [req-e6b56bdc-782d-4f74-8516-56a1a5e7dfdf req-291d8b3b-411b-487a-a568-d6c9948cb3b9 service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Received event network-vif-deleted-4332b789-1993-4df4-8099-15089bf507db {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.820165] env[70020]: INFO nova.compute.manager [req-e6b56bdc-782d-4f74-8516-56a1a5e7dfdf req-291d8b3b-411b-487a-a568-d6c9948cb3b9 service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Neutron deleted interface 4332b789-1993-4df4-8099-15089bf507db; detaching it from the instance and deleting it from the info cache [ 1172.820432] env[70020]: DEBUG nova.network.neutron [req-e6b56bdc-782d-4f74-8516-56a1a5e7dfdf req-291d8b3b-411b-487a-a568-d6c9948cb3b9 service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] 
Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.997037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.997037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.997037] env[70020]: DEBUG nova.compute.manager [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1172.997606] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b364f7-b38d-4f71-8c89-615d90aef948 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.005059] env[70020]: DEBUG nova.compute.manager [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1173.005509] env[70020]: DEBUG nova.objects.instance [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'flavor' on Instance uuid 85da90b5-c3cc-4e35-8c86-6aca07992a09 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.253326] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d65ab5e0-189c-43e1-accf-16248ad02852] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.302157] env[70020]: DEBUG nova.network.neutron [-] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.322908] env[70020]: DEBUG nova.scheduler.client.report [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1173.323182] env[70020]: DEBUG nova.compute.provider_tree [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 166 to 167 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1173.323360] env[70020]: DEBUG nova.compute.provider_tree [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1173.327051] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e81c350-ca41-437d-a67e-ea868230a79b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.336449] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea55c93-8397-423d-a45a-541eb2ed588a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.368762] env[70020]: DEBUG nova.compute.manager [req-e6b56bdc-782d-4f74-8516-56a1a5e7dfdf req-291d8b3b-411b-487a-a568-d6c9948cb3b9 service nova] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Detach interface failed, port_id=4332b789-1993-4df4-8099-15089bf507db, reason: Instance c0a78ace-307e-4156-beb3-a53061acff7f could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1173.490799] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1173.538616] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1173.538852] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1173.539011] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1173.539207] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1173.539352] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1173.539494] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1173.539696] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1173.539867] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1173.540069] env[70020]: DEBUG 
nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1173.540239] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1173.540403] env[70020]: DEBUG nova.virt.hardware [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1173.541409] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a63088e-f6db-4ae7-b1e6-fcfec97ada44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.549740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c0a18c-eeb4-4ed4-8acd-9e01a899e11b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.756541] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 61875dcc-5b76-409b-987f-4ae875909257] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.804193] env[70020]: INFO nova.compute.manager [-] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Took 1.49 seconds to deallocate network for instance. [ 1173.827929] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.828500] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1173.831228] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.934s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.831503] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.885936] env[70020]: INFO nova.scheduler.client.report [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocations for instance ce4796b0-4ad2-4468-9898-aaedce6dcd32 [ 1174.013090] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.013977] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8e7301c-06b9-4d64-916d-5a0912465058 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.021358] env[70020]: DEBUG oslo_vmware.api [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1174.021358] env[70020]: value = "task-3619120" [ 1174.021358] env[70020]: _type = "Task" [ 1174.021358] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.030910] env[70020]: DEBUG oslo_vmware.api [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.110917] env[70020]: DEBUG nova.network.neutron [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Successfully updated port: 2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1174.259942] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 00232eca-da03-49ea-b62b-d9721739b0ec] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1174.314267] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.314267] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.314267] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.336404] env[70020]: DEBUG nova.compute.utils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1174.337112] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1174.337380] env[70020]: DEBUG nova.network.neutron [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1174.366769] env[70020]: INFO nova.scheduler.client.report [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Deleted allocations for instance c0a78ace-307e-4156-beb3-a53061acff7f [ 1174.393448] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d369a7f0-3425-470f-97e8-9438ab70027c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ce4796b0-4ad2-4468-9898-aaedce6dcd32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.872s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.444238] env[70020]: DEBUG nova.policy [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33279b0a8dc848ceb443776f840845c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16f59a8f930846ec9299416b9ec5dd48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1174.531650] env[70020]: DEBUG oslo_vmware.api [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619120, 'name': PowerOffVM_Task, 'duration_secs': 0.196071} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.531913] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1174.532120] env[70020]: DEBUG nova.compute.manager [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1174.532864] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e8e643-b8de-47ee-a29d-146541352795 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.613987] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.614218] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.614569] env[70020]: DEBUG nova.network.neutron [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1174.750264] env[70020]: DEBUG nova.network.neutron [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Successfully created port: f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1174.765013] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 422ca332-5952-443c-a22e-67b1b45df5b9] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1174.787903] env[70020]: DEBUG oslo_concurrency.lockutils [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.788357] env[70020]: DEBUG oslo_concurrency.lockutils [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock 
"8dbb1de0-38de-493f-9512-b8754bab7bcb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.840843] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1174.852446] env[70020]: DEBUG nova.compute.manager [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Received event network-vif-plugged-2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.852897] env[70020]: DEBUG oslo_concurrency.lockutils [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.852897] env[70020]: DEBUG oslo_concurrency.lockutils [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] Lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.853143] env[70020]: DEBUG oslo_concurrency.lockutils [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] Lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.853239] env[70020]: DEBUG nova.compute.manager [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] No waiting events found dispatching network-vif-plugged-2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1174.853399] env[70020]: WARNING nova.compute.manager [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Received unexpected event network-vif-plugged-2e2d4a2d-9a02-4d7e-b369-98b035a79190 for instance with vm_state building and task_state spawning. 
[ 1174.853552] env[70020]: DEBUG nova.compute.manager [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Received event network-changed-2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.854438] env[70020]: DEBUG nova.compute.manager [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Refreshing instance network info cache due to event network-changed-2e2d4a2d-9a02-4d7e-b369-98b035a79190. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1174.854438] env[70020]: DEBUG oslo_concurrency.lockutils [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] Acquiring lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.877795] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b155af85-5adf-45f5-8b25-404413d45a7f tempest-DeleteServersTestJSON-1971284771 tempest-DeleteServersTestJSON-1971284771-project-member] Lock "c0a78ace-307e-4156-beb3-a53061acff7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.183s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.044596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cdb7c76c-75ab-4c5e-9368-f9a6ad1ef625 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.147905] env[70020]: DEBUG nova.network.neutron [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1175.269239] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d3dbc3d1-bba7-4803-bacb-02de27a6a4ff] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1175.291542] env[70020]: INFO nova.compute.manager [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Detaching volume 1acb6a62-8f9b-4b43-be82-4306b549a1ba [ 1175.302969] env[70020]: DEBUG nova.network.neutron [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [{"id": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "address": "fa:16:3e:41:7e:ca", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d4a2d-9a", "ovs_interfaceid": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.341985] env[70020]: INFO nova.virt.block_device [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Attempting to driver detach volume 1acb6a62-8f9b-4b43-be82-4306b549a1ba from mountpoint /dev/sdb [ 1175.341985] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1175.342123] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1175.343209] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8da9ec3-35e3-45ad-b003-e5106ea111da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.371024] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfa864f-dcd9-4607-a654-3da075713c56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.378251] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec87a17-a38a-4d5b-a587-17ce099257c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.398154] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e446e2b-5b87-4910-9fd9-0496277b4e15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.413029] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] The volume has not been displaced from its original location: [datastore1] volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba/volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1175.418269] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1175.418599] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-462dc39c-77b5-41ed-a676-07aa071893d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.436943] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1175.436943] env[70020]: value = "task-3619122" [ 1175.436943] env[70020]: _type = "Task" [ 1175.436943] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.447835] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619122, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.772675] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 1d9218db-05d8-4e33-837f-e9865946237f] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1175.805488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.805766] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Instance network_info: |[{"id": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "address": "fa:16:3e:41:7e:ca", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d4a2d-9a", "ovs_interfaceid": 
"2e2d4a2d-9a02-4d7e-b369-98b035a79190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1175.806675] env[70020]: DEBUG oslo_concurrency.lockutils [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] Acquired lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.806861] env[70020]: DEBUG nova.network.neutron [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Refreshing network info cache for port 2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1175.807971] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:7e:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e2d4a2d-9a02-4d7e-b369-98b035a79190', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1175.815073] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1175.817955] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.818184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.818379] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "85da90b5-c3cc-4e35-8c86-6aca07992a09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.818559] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.818717] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.819984] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1175.820641] env[70020]: INFO nova.compute.manager [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Terminating instance [ 1175.821738] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a8e60e9-f74f-4426-805c-3a8c2418edf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.841831] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1175.841831] env[70020]: value = "task-3619123" [ 1175.841831] env[70020]: _type = "Task" [ 1175.841831] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.849455] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619123, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.851183] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1175.911810] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1175.912141] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.912298] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1175.912479] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.912619] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1175.912758] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1175.912965] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1175.913147] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1175.913315] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1175.913474] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1175.913640] env[70020]: DEBUG nova.virt.hardware [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1175.914509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9c2247-337c-4f4a-b6e5-99e7e571c1c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.922273] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73741c56-9293-49de-af8e-e730653b603f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.948356] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619122, 'name': ReconfigVM_Task, 'duration_secs': 0.253076} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.948614] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1175.953176] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d7d0e01-304e-4f29-8d94-6b66bf9e4640 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.968444] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1175.968444] env[70020]: value = "task-3619124" [ 1175.968444] env[70020]: _type = "Task" [ 1175.968444] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.976248] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.090620] env[70020]: DEBUG nova.network.neutron [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updated VIF entry in instance network info cache for port 2e2d4a2d-9a02-4d7e-b369-98b035a79190. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1176.090684] env[70020]: DEBUG nova.network.neutron [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [{"id": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "address": "fa:16:3e:41:7e:ca", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d4a2d-9a", "ovs_interfaceid": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.280492] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 4335f92a-897a-4779-be70-4f0754a66d53] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1176.296891] env[70020]: DEBUG nova.network.neutron [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Successfully updated port: f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1176.338327] env[70020]: DEBUG nova.compute.manager [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1176.338633] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.339903] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad41654-0cb7-4d36-bb0d-77893981f659 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.352986] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619123, 'name': CreateVM_Task, 'duration_secs': 0.353375} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.354868] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1176.355153] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.355772] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.355946] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.356270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1176.356482] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de0732a7-fd58-493f-a20e-e29b8f91922c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.357772] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4beecb3-3685-40ed-b011-7785617648ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.362218] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1176.362218] env[70020]: value = 
"session[528c1535-3daa-a7b0-823d-982a96a72224]529d7b63-f3f3-7205-cbef-ba2c47fb632a" [ 1176.362218] env[70020]: _type = "Task" [ 1176.362218] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.368996] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d7b63-f3f3-7205-cbef-ba2c47fb632a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.419661] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.419894] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.420128] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore1] 85da90b5-c3cc-4e35-8c86-6aca07992a09 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.420354] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54d025a9-4b34-4793-8eff-a5ed1dde87fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.427218] env[70020]: DEBUG oslo_vmware.api [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1176.427218] env[70020]: value = "task-3619126" [ 1176.427218] env[70020]: _type = "Task" [ 1176.427218] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.435049] env[70020]: DEBUG oslo_vmware.api [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619126, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.477139] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619124, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.593396] env[70020]: DEBUG oslo_concurrency.lockutils [req-d98c8365-1b4c-4f0c-b8df-68f6cd97fafc req-7750a27c-ce41-4804-9b15-01f50703be55 service nova] Releasing lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.783657] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ef85421b-b679-4f38-b052-5695baa2e405] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1176.800502] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.800502] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.800502] env[70020]: DEBUG nova.network.neutron [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1176.873981] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d7b63-f3f3-7205-cbef-ba2c47fb632a, 'name': SearchDatastore_Task, 'duration_secs': 0.009962} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.874291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.874517] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1176.874751] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.874895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.875083] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.875332] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50bd7cc9-2594-400f-8c93-42b47b4a0357 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.880972] env[70020]: DEBUG nova.compute.manager [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Received event network-vif-plugged-f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.881526] env[70020]: DEBUG oslo_concurrency.lockutils [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.881526] env[70020]: DEBUG oslo_concurrency.lockutils [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.881624] env[70020]: DEBUG oslo_concurrency.lockutils [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.881802] env[70020]: DEBUG nova.compute.manager [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] No waiting events found dispatching network-vif-plugged-f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1176.881908] env[70020]: WARNING nova.compute.manager [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Received unexpected event network-vif-plugged-f17543b2-5415-422a-b395-b7aa575543a0 for instance with vm_state building and task_state spawning. [ 1176.882135] env[70020]: DEBUG nova.compute.manager [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Received event network-changed-f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.882305] env[70020]: DEBUG nova.compute.manager [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Refreshing instance network info cache due to event network-changed-f17543b2-5415-422a-b395-b7aa575543a0. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1176.882469] env[70020]: DEBUG oslo_concurrency.lockutils [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] Acquiring lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.884075] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.884283] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1176.885029] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b63aa0-33b1-4912-93c6-43104286b046 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.890949] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1176.890949] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52805787-ef6c-9333-78f1-ee37abd47014" [ 1176.890949] env[70020]: _type = "Task" [ 1176.890949] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.898115] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52805787-ef6c-9333-78f1-ee37abd47014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.936260] env[70020]: DEBUG oslo_vmware.api [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619126, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129912} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.936526] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.936735] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1176.936906] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1176.937109] env[70020]: INFO nova.compute.manager [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1176.937353] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1176.937533] env[70020]: DEBUG nova.compute.manager [-] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1176.937626] env[70020]: DEBUG nova.network.neutron [-] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1176.977827] env[70020]: DEBUG oslo_vmware.api [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619124, 'name': ReconfigVM_Task, 'duration_secs': 0.770715} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.978133] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721804', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'name': 'volume-1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8dbb1de0-38de-493f-9512-b8754bab7bcb', 'attached_at': '', 'detached_at': '', 'volume_id': '1acb6a62-8f9b-4b43-be82-4306b549a1ba', 'serial': '1acb6a62-8f9b-4b43-be82-4306b549a1ba'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1177.286631] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 24184767-92f7-48b3-bbad-16a596ececde] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1177.336683] env[70020]: DEBUG nova.network.neutron [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1177.400717] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52805787-ef6c-9333-78f1-ee37abd47014, 'name': SearchDatastore_Task, 'duration_secs': 0.008583} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.403805] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62d513d4-f868-4e75-854a-1f013998462f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.409653] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1177.409653] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5282ed70-9291-5a68-5b98-40db26fe1aea" [ 1177.409653] env[70020]: _type = "Task" [ 1177.409653] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.417434] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5282ed70-9291-5a68-5b98-40db26fe1aea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.469290] env[70020]: DEBUG nova.network.neutron [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updating instance_info_cache with network_info: [{"id": "f17543b2-5415-422a-b395-b7aa575543a0", "address": "fa:16:3e:d0:47:2c", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf17543b2-54", "ovs_interfaceid": "f17543b2-5415-422a-b395-b7aa575543a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.524027] env[70020]: DEBUG nova.objects.instance [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'flavor' on Instance uuid 8dbb1de0-38de-493f-9512-b8754bab7bcb {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.774027] env[70020]: DEBUG nova.network.neutron [-] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.789904] 
env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 8bff6907-c2b0-4ad1-9298-b2d622d33fde] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1177.920608] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5282ed70-9291-5a68-5b98-40db26fe1aea, 'name': SearchDatastore_Task, 'duration_secs': 0.009699} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.920879] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.921150] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26/ca63297c-b7bc-45e9-8850-f46050905c26.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1177.921403] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b46eaaa7-f926-4702-8a8e-faa2d306f13a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.927996] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1177.927996] env[70020]: value = "task-3619127" [ 1177.927996] env[70020]: _type = "Task" [ 1177.927996] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.935731] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619127, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.971915] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.972270] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Instance network_info: |[{"id": "f17543b2-5415-422a-b395-b7aa575543a0", "address": "fa:16:3e:d0:47:2c", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf17543b2-54", "ovs_interfaceid": "f17543b2-5415-422a-b395-b7aa575543a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1177.972582] env[70020]: DEBUG oslo_concurrency.lockutils [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] Acquired lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.972780] env[70020]: DEBUG nova.network.neutron [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Refreshing network info cache for port f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.973989] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:47:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f17543b2-5415-422a-b395-b7aa575543a0', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.982738] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 
tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.983739] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1177.983964] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74e9d1d4-1b30-4ecb-b0a8-4178dffdd662 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.005438] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1178.005438] env[70020]: value = "task-3619128" [ 1178.005438] env[70020]: _type = "Task" [ 1178.005438] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.016219] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619128, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.276078] env[70020]: INFO nova.compute.manager [-] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Took 1.34 seconds to deallocate network for instance. [ 1178.292636] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ef0d716a-080e-4167-bd34-b2c660b95c88] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1178.438830] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433284} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.439019] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26/ca63297c-b7bc-45e9-8850-f46050905c26.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1178.439168] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1178.439486] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-089eceb2-85d3-4740-9243-ff51a7d8c031 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.446257] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1178.446257] env[70020]: value = "task-3619129" [ 1178.446257] env[70020]: _type = "Task" [ 1178.446257] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.454454] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.515672] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619128, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.531074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-80cb339f-3419-42df-a2d5-93b83f3a8b5d tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.743s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.746453] env[70020]: DEBUG nova.network.neutron [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updated VIF entry in instance network info cache for port f17543b2-5415-422a-b395-b7aa575543a0. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.746800] env[70020]: DEBUG nova.network.neutron [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updating instance_info_cache with network_info: [{"id": "f17543b2-5415-422a-b395-b7aa575543a0", "address": "fa:16:3e:d0:47:2c", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf17543b2-54", "ovs_interfaceid": "f17543b2-5415-422a-b395-b7aa575543a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.783797] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.784287] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.784533] env[70020]: DEBUG nova.objects.instance [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'resources' on Instance uuid 85da90b5-c3cc-4e35-8c86-6aca07992a09 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1178.796537] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f01c1dbf-d6d8-4bf8-b0f8-3efc492a9bd1] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1178.908642] env[70020]: DEBUG nova.compute.manager [req-c8ffadbf-a650-4976-834d-35da2369d21b req-689bd6a1-beb0-46cd-b516-c0285022d65d service nova] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Received event network-vif-deleted-fdf42d56-b2c8-46af-be73-f38818a96a46 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1178.956972] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 
tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064017} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.957298] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1178.958108] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d17f47-c6d2-45d3-9cac-01c191905002 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.981698] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26/ca63297c-b7bc-45e9-8850-f46050905c26.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1178.982029] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0529e2f3-a833-4ab4-a22b-b7654ccde48b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.003680] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1179.003680] env[70020]: value = "task-3619130" [ 1179.003680] env[70020]: _type = "Task" [ 1179.003680] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.012461] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619130, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.017633] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619128, 'name': CreateVM_Task, 'duration_secs': 0.524662} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.017755] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1179.018377] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.018549] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.018860] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1179.019124] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f42b1e98-703b-4ba7-b1f2-df8a79206d80 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.023958] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1179.023958] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52df0cc6-eb14-a5f8-5b7d-12a485207cd2" [ 1179.023958] env[70020]: _type = "Task" [ 1179.023958] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.032523] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52df0cc6-eb14-a5f8-5b7d-12a485207cd2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.145321] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.145566] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.145822] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.146086] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.146284] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.148495] env[70020]: INFO nova.compute.manager [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Terminating instance [ 1179.251589] env[70020]: DEBUG oslo_concurrency.lockutils [req-ff03c461-9334-4f34-8ba6-51c3b70b57cb req-933b719e-6149-4b71-85a7-8093bad88d23 service nova] Releasing lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.299321] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: b53f55c1-1867-410c-9c53-f552ff30d697] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1179.484418] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bc5fdf-9a2c-4159-a70e-d951ea313b6c {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.491655] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd800752-ef83-4c04-a42f-5e07dfadc092 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.525890] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ae5a7e-c330-4c04-a78f-e9d10837c3f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.535478] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619130, 'name': ReconfigVM_Task, 'duration_secs': 0.436754} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.540467] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Reconfigured VM instance instance-00000072 to attach disk [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26/ca63297c-b7bc-45e9-8850-f46050905c26.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1179.541104] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52df0cc6-eb14-a5f8-5b7d-12a485207cd2, 'name': SearchDatastore_Task, 'duration_secs': 0.010486} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.541347] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36619391-2b2f-45e2-96c5-8322db639371 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.542784] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.543024] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1179.543259] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.543402] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.543573] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.544739] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f532e46-4cde-4650-beba-495bed6848ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.548451] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a156f51b-b9b4-492f-8b2a-66748f9b3849 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.560862] env[70020]: DEBUG nova.compute.provider_tree [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1179.564788] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.564788] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1179.565210] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1179.565210] env[70020]: value = "task-3619131" [ 1179.565210] env[70020]: _type = "Task" [ 1179.565210] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.565419] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50af9a7d-a8ab-4cd6-afb3-d59b005601e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.573559] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1179.573559] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520f2028-6f98-51fe-c9eb-2085c2f8468f" [ 1179.573559] env[70020]: _type = "Task" [ 1179.573559] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.577232] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619131, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.586250] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520f2028-6f98-51fe-c9eb-2085c2f8468f, 'name': SearchDatastore_Task, 'duration_secs': 0.010032} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.586953] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bb1a299-cff0-458b-9298-a006a4b9209b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.591864] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1179.591864] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522d1c42-2e19-3e2b-33c9-e450ed231aef" [ 1179.591864] env[70020]: _type = "Task" [ 1179.591864] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.600138] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d1c42-2e19-3e2b-33c9-e450ed231aef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.652929] env[70020]: DEBUG nova.compute.manager [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1179.653140] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1179.654013] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee60ff07-f446-43ff-ba0d-b58c4049fd47 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.661310] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.661562] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f07fb1fb-6c75-47a3-9742-a4c9046e9e01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.668190] env[70020]: DEBUG oslo_vmware.api [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1179.668190] env[70020]: value = "task-3619132" [ 1179.668190] env[70020]: _type = "Task" [ 1179.668190] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.676755] env[70020]: DEBUG oslo_vmware.api [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619132, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.803211] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 5c216231-afc5-41df-a243-bb2a17c20bfe] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.078231] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619131, 'name': Rename_Task, 'duration_secs': 0.134366} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.078570] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.079915] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-705707fc-2589-4a63-8482-be64c2488d23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.087123] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1180.087123] env[70020]: value = "task-3619133" [ 1180.087123] env[70020]: _type = "Task" [ 1180.087123] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.094994] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.101265] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522d1c42-2e19-3e2b-33c9-e450ed231aef, 'name': SearchDatastore_Task, 'duration_secs': 0.009014} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.101463] env[70020]: DEBUG oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.101719] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1b25f8db-457e-4948-b9da-35e2fa5b897e/1b25f8db-457e-4948-b9da-35e2fa5b897e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.101965] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ead69f3-1db1-4c61-b3a5-a08412e7cb02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.108358] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1180.108358] env[70020]: value = "task-3619134" [ 1180.108358] env[70020]: _type = "Task" [ 1180.108358] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.111662] env[70020]: DEBUG nova.scheduler.client.report [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 167 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1180.111829] env[70020]: DEBUG nova.compute.provider_tree [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 167 to 168 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1180.112011] env[70020]: DEBUG nova.compute.provider_tree [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1180.120520] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619134, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.177783] env[70020]: DEBUG oslo_vmware.api [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619132, 'name': PowerOffVM_Task, 'duration_secs': 0.179723} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.178070] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1180.178243] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1180.178492] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3de75b9-0e6c-4050-aaa9-07d30867e6df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.307342] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f16d60a4-5f80-4f41-b994-068de48775ad] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.595670] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1180.596011] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1180.596242] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore1] 8dbb1de0-38de-493f-9512-b8754bab7bcb {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1180.599698] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23fe4fe4-0985-4e5d-8874-582498d03836 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.601439] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619133, 'name': PowerOnVM_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.605418] env[70020]: DEBUG oslo_vmware.api [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1180.605418] env[70020]: value = "task-3619136" [ 1180.605418] env[70020]: _type = "Task" [ 1180.605418] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.617518] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.619489] env[70020]: DEBUG oslo_vmware.api [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619136, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.622492] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619134, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48602} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.622754] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 1b25f8db-457e-4948-b9da-35e2fa5b897e/1b25f8db-457e-4948-b9da-35e2fa5b897e.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.623029] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.623276] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8536b25c-7ed0-4e01-bfb3-8806af0d2115 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.629361] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1180.629361] env[70020]: value = "task-3619137" [ 1180.629361] env[70020]: _type = "Task" [ 1180.629361] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.637271] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.670533] env[70020]: INFO nova.scheduler.client.report [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance 85da90b5-c3cc-4e35-8c86-6aca07992a09 [ 1180.811529] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f56e88f6-3a25-44d9-bdb1-cc4291169c9c] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.097029] env[70020]: DEBUG oslo_vmware.api [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619133, 'name': PowerOnVM_Task, 'duration_secs': 0.665382} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.097275] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.097481] env[70020]: INFO nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Took 7.61 seconds to spawn the instance on the hypervisor. [ 1181.097657] env[70020]: DEBUG nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1181.098432] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d281dd48-cc15-4596-973a-cd9cc5c37fd3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.115407] env[70020]: DEBUG oslo_vmware.api [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284939} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.115648] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1181.115822] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1181.116063] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1181.116276] env[70020]: INFO nova.compute.manager [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Took 1.46 seconds to destroy the instance on the hypervisor. [ 1181.116514] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1181.116699] env[70020]: DEBUG nova.compute.manager [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1181.116839] env[70020]: DEBUG nova.network.neutron [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1181.138247] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078847} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.138557] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1181.139351] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7361fe98-89de-4961-ab93-478a786e0647 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.161730] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 1b25f8db-457e-4948-b9da-35e2fa5b897e/1b25f8db-457e-4948-b9da-35e2fa5b897e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.162033] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d71c7cec-5d6e-4df4-80cb-ab7c214b2d49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.182885] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9694468b-eb56-40f0-a3e1-341e4fec4482 tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "85da90b5-c3cc-4e35-8c86-6aca07992a09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.363s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.186092] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1181.186092] env[70020]: value = "task-3619138" [ 1181.186092] env[70020]: _type = "Task" [ 1181.186092] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.195039] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619138, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.314682] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 2198e7f8-5458-4b97-abb3-0a3c932cebc2] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.619033] env[70020]: INFO nova.compute.manager [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Took 13.64 seconds to build instance. [ 1181.699553] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619138, 'name': ReconfigVM_Task, 'duration_secs': 0.360688} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.699885] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 1b25f8db-457e-4948-b9da-35e2fa5b897e/1b25f8db-457e-4948-b9da-35e2fa5b897e.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.700545] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf6098ac-5d1c-4c6f-8b6e-ed67d9507b44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.707418] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1181.707418] env[70020]: value = "task-3619139" [ 1181.707418] env[70020]: _type = "Task" [ 1181.707418] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.715398] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619139, 'name': Rename_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.755646] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.755903] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.756193] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.756381] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.756546] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.758849] env[70020]: INFO nova.compute.manager [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Terminating instance [ 1181.818343] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c9ce57f3-f9a2-40aa-b7eb-403840c34304] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.898955] env[70020]: DEBUG nova.compute.manager [req-4b06087c-9d03-4f95-8bd4-386f80f5c54e req-f0c7b696-deca-42a4-b9d6-3ad4e87b78f4 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Received event network-vif-deleted-8e1b8b9c-b1c2-448e-8d9c-621c1810194a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.899721] env[70020]: INFO nova.compute.manager [req-4b06087c-9d03-4f95-8bd4-386f80f5c54e req-f0c7b696-deca-42a4-b9d6-3ad4e87b78f4 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Neutron deleted interface 8e1b8b9c-b1c2-448e-8d9c-621c1810194a; detaching it from the instance and deleting it 
from the info cache [ 1181.899721] env[70020]: DEBUG nova.network.neutron [req-4b06087c-9d03-4f95-8bd4-386f80f5c54e req-f0c7b696-deca-42a4-b9d6-3ad4e87b78f4 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.121784] env[70020]: DEBUG oslo_concurrency.lockutils [None req-73bee47a-40b1-4ec8-9bf8-0dd38d09980c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.150s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.219885] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619139, 'name': Rename_Task, 'duration_secs': 0.226078} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.220422] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.220796] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc1640d6-d2b9-4917-b273-85a80a715b2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.228035] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1182.228035] env[70020]: value = "task-3619140" [ 1182.228035] env[70020]: _type = "Task" [ 1182.228035] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.236559] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.265237] env[70020]: DEBUG nova.compute.manager [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1182.265237] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1182.265237] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1aacf75-e34d-4f77-af6f-6e14d5edffb6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.273272] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.273768] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d440ccd1-b179-44b5-8423-bb1c370e55e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.280310] env[70020]: DEBUG oslo_vmware.api [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1182.280310] env[70020]: value = "task-3619141" [ 1182.280310] env[70020]: _type = "Task" [ 1182.280310] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.289314] env[70020]: DEBUG oslo_vmware.api [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.322111] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 6f2bc97b-0f0a-4f16-b41c-7af96130783f] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1182.342977] env[70020]: DEBUG nova.network.neutron [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.403738] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4379074-78ae-48b5-b11d-4e81bf2714cd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.414710] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2affed49-f376-4606-81d5-efc70a35b91a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.453520] env[70020]: DEBUG nova.compute.manager [req-4b06087c-9d03-4f95-8bd4-386f80f5c54e req-f0c7b696-deca-42a4-b9d6-3ad4e87b78f4 service nova] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Detach interface failed, port_id=8e1b8b9c-b1c2-448e-8d9c-621c1810194a, reason: Instance 8dbb1de0-38de-493f-9512-b8754bab7bcb could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1182.742024] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619140, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.792071] env[70020]: DEBUG oslo_vmware.api [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619141, 'name': PowerOffVM_Task, 'duration_secs': 0.296635} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.793287] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1182.793515] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1182.793804] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f78e75a-e085-46d0-9333-1778ab574a3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.825361] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 16c45b86-317a-4d0c-a402-51c85af37a5b] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1182.847354] env[70020]: INFO nova.compute.manager [-] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Took 1.73 seconds to deallocate network for instance. 
[ 1182.861145] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1182.861492] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1182.861753] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleting the datastore file [datastore1] 13f6daa5-d859-40ed-b1b0-edd7717b8df3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1182.862107] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af176760-5458-45d7-8373-208d57bce825 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.870200] env[70020]: DEBUG oslo_vmware.api [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for the task: (returnval){ [ 1182.870200] env[70020]: value = "task-3619143" [ 1182.870200] env[70020]: _type = "Task" [ 1182.870200] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.880281] env[70020]: DEBUG oslo_vmware.api [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619143, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.246879] env[70020]: DEBUG oslo_vmware.api [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619140, 'name': PowerOnVM_Task, 'duration_secs': 0.786817} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.246879] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.247212] env[70020]: INFO nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Took 7.40 seconds to spawn the instance on the hypervisor. 
[ 1183.247462] env[70020]: DEBUG nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.248305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539daded-14cf-4819-ad57-a6523200e189 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.330072] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 8adadb2e-2a20-45b1-bed8-34e09df25f39] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1183.355031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.355178] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.355437] env[70020]: DEBUG nova.objects.instance [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'resources' on Instance uuid 8dbb1de0-38de-493f-9512-b8754bab7bcb {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.385018] env[70020]: DEBUG oslo_vmware.api [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Task: {'id': task-3619143, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243719} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.385018] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1183.385018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1183.385018] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1183.385018] env[70020]: INFO nova.compute.manager [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1183.385018] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1183.385018] env[70020]: DEBUG nova.compute.manager [-] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1183.385018] env[70020]: DEBUG nova.network.neutron [-] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1183.776738] env[70020]: INFO nova.compute.manager [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Took 14.15 seconds to build instance. 
[ 1183.839344] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 0caa6acd-29d4-43ee-8b32-5149462dfc1c] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.036305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75615dec-1d95-420e-86b6-58f6b365ffc9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.051049] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f47d28a-0e38-40c4-b823-aefc7b74425e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.080640] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e15d420-57f7-4c46-b540-129a1d55bffd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.083502] env[70020]: DEBUG nova.compute.manager [req-d3a3efa9-f653-4ce3-aecf-fb05b9aa4969 req-63370773-8065-4997-a063-bfcfa0ba3a27 service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Received event network-vif-deleted-347b7e73-55ed-4f2b-96f7-96ab25367148 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.083735] env[70020]: INFO nova.compute.manager [req-d3a3efa9-f653-4ce3-aecf-fb05b9aa4969 req-63370773-8065-4997-a063-bfcfa0ba3a27 service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Neutron deleted interface 347b7e73-55ed-4f2b-96f7-96ab25367148; detaching it from the instance and deleting it from the info cache [ 1184.083934] env[70020]: DEBUG nova.network.neutron [req-d3a3efa9-f653-4ce3-aecf-fb05b9aa4969 req-63370773-8065-4997-a063-bfcfa0ba3a27 service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.091338] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a49fc3d-4dd5-4136-8a73-2559212dda16 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.106112] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.142023] env[70020]: DEBUG nova.compute.manager [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1184.281848] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-15729399-d26c-4338-8ee5-019c58e0c012 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.661s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.292019] env[70020]: DEBUG nova.network.neutron [-] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.344457] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 4b5750d4-98ec-4c70-b214-fad97060b606] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.587618] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0dbff16-e213-47cc-9136-6be5df4dcd17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.598123] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5cecd9-87b4-4575-b1e4-399f8502393d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.635346] env[70020]: DEBUG nova.compute.manager [req-d3a3efa9-f653-4ce3-aecf-fb05b9aa4969 req-63370773-8065-4997-a063-bfcfa0ba3a27 service nova] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Detach interface failed, port_id=347b7e73-55ed-4f2b-96f7-96ab25367148, reason: Instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1184.640833] env[70020]: ERROR nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [req-77b94a18-7791-400b-b1f5-22aaba0c31cd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-77b94a18-7791-400b-b1f5-22aaba0c31cd"}]} [ 1184.661182] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1184.680138] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1184.680138] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.681129] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.691183] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1184.721433] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1184.794975] env[70020]: 
INFO nova.compute.manager [-] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Took 1.41 seconds to deallocate network for instance. [ 1184.847673] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: bc57657e-99e8-46b8-9731-ddd4864a3114] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.903456] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab5d36-9674-4043-ba02-8e85ff390cfb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.912715] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ee3246-7f69-4279-aab2-4e599386e5cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.921144] env[70020]: DEBUG nova.compute.manager [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Received event network-changed-f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.921318] env[70020]: DEBUG nova.compute.manager [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Refreshing instance network info cache due to event network-changed-f17543b2-5415-422a-b395-b7aa575543a0. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1184.921536] env[70020]: DEBUG oslo_concurrency.lockutils [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] Acquiring lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.921676] env[70020]: DEBUG oslo_concurrency.lockutils [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] Acquired lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.921834] env[70020]: DEBUG nova.network.neutron [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Refreshing network info cache for port f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.954094] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6686c055-14a7-4659-9ff1-ecb026cc81cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.963629] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f303dd6-76fb-4a83-8ea3-17197e6dfbf6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.980472] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider 
ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1185.250593] env[70020]: DEBUG nova.network.neutron [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updated VIF entry in instance network info cache for port f17543b2-5415-422a-b395-b7aa575543a0. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1185.251251] env[70020]: DEBUG nova.network.neutron [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updating instance_info_cache with network_info: [{"id": "f17543b2-5415-422a-b395-b7aa575543a0", "address": "fa:16:3e:d0:47:2c", "network": {"id": "405de05e-83d0-46c5-9397-bb1ba00f7ab7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202335876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16f59a8f930846ec9299416b9ec5dd48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf17543b2-54", "ovs_interfaceid": "f17543b2-5415-422a-b395-b7aa575543a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.302330] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.353740] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ea97f6ab-057e-44d3-835a-68b46d241621] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1185.505108] env[70020]: ERROR nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [req-79520250-f2a2-482c-8cf7-07aaad838f20] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-79520250-f2a2-482c-8cf7-07aaad838f20"}]} [ 1185.524110] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1185.545687] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1185.546041] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1185.571025] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1185.614925] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1185.726359] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 
tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "c29d577e-9498-40b1-8e49-caff821cb80a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.726640] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.755729] env[70020]: DEBUG oslo_concurrency.lockutils [req-7cdb4431-29f0-4142-a876-8d79c578e100 req-461ce0f7-3e2f-45e3-9a81-fd0b0a0d6076 service nova] Releasing lock "refresh_cache-1b25f8db-457e-4948-b9da-35e2fa5b897e" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.794920] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5920b8b-cfe6-4538-a5a8-16192ef65604 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.802909] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02068384-f253-41f6-9a65-0edd4ae73fd9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.833275] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb98bab5-27b0-4abe-a712-db58e04e56a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.843470] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc0530f-1e6a-488d-bbb3-7fbf87568265 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.859056] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1185.860372] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1185.861237] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Cleaning up deleted instances with incomplete migration {{(pid=70020) _cleanup_incomplete_migrations 
/opt/stack/nova/nova/compute/manager.py:11872}} [ 1186.231852] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1186.364710] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1186.379037] env[70020]: ERROR nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [req-7177ffbe-b9b1-4a06-a856-b79534f0addd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7177ffbe-b9b1-4a06-a856-b79534f0addd"}]} [ 1186.395669] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1186.407585] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1186.407793] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1186.419510] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1186.434259] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1186.586764] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f282fc-d528-4e13-8aa9-dbfe34d02217 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.594517] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ef865-f351-4304-ad55-aa811e958e38 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.623552] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3065c34-58c9-4b30-9877-a1271a4701d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.631602] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5becf3d7-e164-4912-ae1a-0096eeba0867 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.320768] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1187.326226] env[70020]: DEBUG nova.compute.manager [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1187.344650] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.629682] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_power_states {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1187.842597] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.852922] env[70020]: DEBUG nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 173 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1187.853187] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 173 to 174 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1187.853363] env[70020]: DEBUG nova.compute.provider_tree [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1188.136853] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Getting list of instances from cluster (obj){ [ 1188.136853] env[70020]: value = "domain-c8" [ 1188.136853] env[70020]: _type = "ClusterComputeResource" [ 1188.136853] env[70020]: } {{(pid=70020) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1188.136853] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cfb6d4-5841-4085-891d-c12024821e6d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.156539] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Got total of 8 instances {{(pid=70020) 
list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1188.156539] env[70020]: WARNING nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] While synchronizing instance power states, found 10 instances in the database and 8 instances on the hypervisor. [ 1188.156539] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 13f6daa5-d859-40ed-b1b0-edd7717b8df3 {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.156736] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 96966bf2-a9ff-48ba-be3f-c767e7b6eedd {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.157379] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.157379] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.157379] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 8dbb1de0-38de-493f-9512-b8754bab7bcb {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.157678] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 9e7bd10b-3a78-48d8-9b66-e3646635be6d {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.157820] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 9962b718-ca31-4f09-91f3-133dd68612ad {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.158048] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.158639] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid ca63297c-b7bc-45e9-8850-f46050905c26 {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.158783] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Triggering sync for uuid 1b25f8db-457e-4948-b9da-35e2fa5b897e {{(pid=70020) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1188.159503] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.159700] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.160023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.160892] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.160892] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.161369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.161884] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.162223] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.162520] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.162788] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.163120] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "9962b718-ca31-4f09-91f3-133dd68612ad" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.163491] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "9962b718-ca31-4f09-91f3-133dd68612ad" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.163694] env[70020]: INFO nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] During sync_power_state the instance has a pending task (resize_prep). Skip. [ 1188.163946] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "9962b718-ca31-4f09-91f3-133dd68612ad" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.164261] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.164545] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.164960] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.165196] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.165445] env[70020]: INFO nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] During sync_power_state the instance has a pending task (resize_prep). Skip. 
[ 1188.165745] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.165985] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.166284] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.167647] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd9518a-784b-4bb2-8412-08f627fb0b45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.171483] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9525238a-9f09-4408-a4d6-9c5c52089639 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.174337] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80a25bd-3d62-4e75-9b32-2ee21a97ad62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.177035] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecc7c14-b848-4be3-afab-9bd0abd68d68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.179573] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f651bd59-d824-4d91-973c-32a75aaf0860 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.182300] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e88ae0-6db8-4286-bbb9-e9d67deb73a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.358858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.003s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.361215] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 
3.680s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.409404] env[70020]: INFO nova.scheduler.client.report [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted allocations for instance 8dbb1de0-38de-493f-9512-b8754bab7bcb [ 1188.695773] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.534s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.704469] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.544s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.704826] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.705143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.709581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.718056] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.866264] env[70020]: INFO nova.compute.claims [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.917539] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d50d0520-e4cf-4ce8-8d13-65957dc64986 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.772s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} 
[ 1188.918567] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.756s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.919463] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d76d93ff-648e-4005-9df2-dc4283ccdfe9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.928606] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9681caa8-9c2b-456e-a71c-3900740fdbb8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.372727] env[70020]: INFO nova.compute.resource_tracker [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating resource usage from migration a8690c1d-73a1-4113-9ff2-0115c012fdfd [ 1189.462016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "8dbb1de0-38de-493f-9512-b8754bab7bcb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.544422] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0f9ac6-e346-4e49-8add-31ec6c222c41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.552495] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e9ea6a-1b53-4541-9149-cf079c55fb54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.583339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b858952b-f9d3-460c-a386-1101e112e89e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.591155] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbc838c-d138-4bfe-9b8b-8aabdcff4903 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.604904] env[70020]: DEBUG nova.compute.provider_tree [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.107772] env[70020]: DEBUG nova.scheduler.client.report [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.616755] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.255s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.619166] env[70020]: INFO nova.compute.manager [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Migrating [ 1190.624040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.322s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.624269] env[70020]: DEBUG nova.objects.instance [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lazy-loading 'resources' on Instance uuid 13f6daa5-d859-40ed-b1b0-edd7717b8df3 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.769110] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.769348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.795725] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4173a4e-1a73-4701-9e62-a493e939c0a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.803630] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2620aa22-2d37-45e0-bca4-c4d19f1567ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.834316] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff2cc0b-e5da-4779-9536-3b8486020e2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1190.841330] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897db354-41ff-49cf-93f5-ddeb2253042c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.854128] env[70020]: DEBUG nova.compute.provider_tree [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.132356] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.132631] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.132715] env[70020]: DEBUG nova.network.neutron [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.272122] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1191.357555] env[70020]: DEBUG nova.scheduler.client.report [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.802658] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.822923] env[70020]: DEBUG nova.network.neutron [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [{"id": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "address": "fa:16:3e:41:7e:ca", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d4a2d-9a", "ovs_interfaceid": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.862270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.238s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.864506] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 
4.520s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.866480] env[70020]: INFO nova.compute.claims [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1191.892282] env[70020]: INFO nova.scheduler.client.report [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Deleted allocations for instance 13f6daa5-d859-40ed-b1b0-edd7717b8df3 [ 1192.326133] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.399735] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c5dd6104-1e19-4d67-9b64-6584a241809a tempest-ServersTestJSON-1543902807 tempest-ServersTestJSON-1543902807-project-member] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.644s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.400754] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.242s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.401073] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8384ddc-c0b0-4cd8-94bf-be6b83be11d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.412274] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7eadf7f-7335-4987-9a2e-235bd0b67439 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.943637] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "13f6daa5-d859-40ed-b1b0-edd7717b8df3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.025729] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6bf4e4-959f-4c78-a8dc-6497e259f6c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.033158] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cf05cf-bce8-4499-bb8b-1843103e0685 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.062131] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f348e22d-734f-4b4b-809c-5ef3458f88e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.069447] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639b073f-7746-495a-a818-85b49f37fe65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.083159] env[70020]: DEBUG nova.compute.provider_tree [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.588704] env[70020]: DEBUG nova.scheduler.client.report [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.841326] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafeade8-a7dd-4f0e-8fb3-7d9d798d7f5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.861383] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1194.092497] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.093200] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1194.098681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.254s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.367348] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.367678] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b0a227a-ebe8-4e04-8775-865016753761 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.375164] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1194.375164] env[70020]: value = "task-3619144" [ 1194.375164] env[70020]: _type = "Task" [ 1194.375164] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.384014] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619144, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.600252] env[70020]: DEBUG nova.compute.utils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1194.601705] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1194.601881] env[70020]: DEBUG nova.network.neutron [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1194.606259] env[70020]: INFO nova.compute.claims [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.673403] env[70020]: DEBUG nova.policy [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfc47981781549c5b4d57df1bd46d556', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f0c0bea4d604c7987011bfa9f00c6da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1194.884779] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619144, 'name': PowerOffVM_Task, 'duration_secs': 0.186857} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.885279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.885547] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1194.987211] env[70020]: DEBUG nova.network.neutron [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Successfully created port: ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1195.111604] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1195.115879] env[70020]: INFO nova.compute.resource_tracker [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating resource usage from migration 9227eea3-db08-4f3a-9838-4e33a88b040b [ 1195.303278] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67a7d45-5eb0-4aaf-95bc-703b3481b6f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.312760] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4e303c-56ce-42bb-ae8a-2040a5ebf43d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.343698] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ee5677-4967-48f5-83c2-27cb19c63448 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.350964] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a8fd10-af52-43a7-98a9-b4b1ac3b8991 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.364099] env[70020]: DEBUG nova.compute.provider_tree [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.392923] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1195.392923] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1195.392923] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1195.393286] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c 
tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1195.393286] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1195.393366] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1195.393496] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1195.393651] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1195.393903] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1195.394049] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1195.394228] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1195.399277] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e3d4d7a-884d-44f1-b6c8-476821403289 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.416316] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1195.416316] env[70020]: value = "task-3619145" [ 1195.416316] env[70020]: _type = "Task" [ 1195.416316] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.424470] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619145, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.869209] env[70020]: DEBUG nova.scheduler.client.report [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.929011] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619145, 'name': ReconfigVM_Task, 'duration_secs': 0.166095} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.929372] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.126022] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1196.195979] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.196354] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.196675] env[70020]: DEBUG nova.objects.instance [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'flavor' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.339977] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.340241] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.340397] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.340720] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.340720] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 
tempest-InstanceActionsTestJSON-986456735-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.340859] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.341071] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.341260] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.341447] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.341612] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.341847] env[70020]: DEBUG nova.virt.hardware [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.342719] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6203aac-6480-43a6-9c02-8ee59cb1eccc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.350700] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0c29b9-c40f-4b0a-a3cb-f46515c1c346 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.372652] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.276s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.373060] env[70020]: INFO nova.compute.manager [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Migrating [ 1196.382967] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.580s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.384390] env[70020]: INFO nova.compute.claims [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.436816] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.437191] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.437191] env[70020]: DEBUG nova.virt.hardware [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.442452] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1196.442713] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31fd7bf4-e3ed-4475-a706-7b1e4d0400d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.463278] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1196.463278] env[70020]: value = "task-3619146" [ 1196.463278] env[70020]: _type = "Task" [ 1196.463278] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.472700] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619146, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.655840] env[70020]: DEBUG nova.compute.manager [req-e834a744-6d65-474d-be45-8fe7cb6c2455 req-6f026004-71af-4891-8a4f-05b0e23b7d8b service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Received event network-vif-plugged-ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.656081] env[70020]: DEBUG oslo_concurrency.lockutils [req-e834a744-6d65-474d-be45-8fe7cb6c2455 req-6f026004-71af-4891-8a4f-05b0e23b7d8b service nova] Acquiring lock "c29d577e-9498-40b1-8e49-caff821cb80a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.656375] env[70020]: DEBUG oslo_concurrency.lockutils [req-e834a744-6d65-474d-be45-8fe7cb6c2455 req-6f026004-71af-4891-8a4f-05b0e23b7d8b service nova] Lock "c29d577e-9498-40b1-8e49-caff821cb80a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.656550] env[70020]: DEBUG oslo_concurrency.lockutils [req-e834a744-6d65-474d-be45-8fe7cb6c2455 req-6f026004-71af-4891-8a4f-05b0e23b7d8b service nova] Lock "c29d577e-9498-40b1-8e49-caff821cb80a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.656715] env[70020]: DEBUG nova.compute.manager [req-e834a744-6d65-474d-be45-8fe7cb6c2455 req-6f026004-71af-4891-8a4f-05b0e23b7d8b service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] No waiting events found dispatching network-vif-plugged-ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1196.656876] env[70020]: WARNING nova.compute.manager [req-e834a744-6d65-474d-be45-8fe7cb6c2455 req-6f026004-71af-4891-8a4f-05b0e23b7d8b service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Received unexpected event network-vif-plugged-ca802e98-cbc6-48e4-8381-beef930ad40f for instance with vm_state building and task_state spawning. 
[ 1196.711108] env[70020]: DEBUG nova.objects.instance [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'pci_requests' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.759496] env[70020]: DEBUG nova.network.neutron [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Successfully updated port: ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1196.893436] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.893732] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.893775] env[70020]: DEBUG nova.network.neutron [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.973729] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619146, 'name': ReconfigVM_Task, 'duration_secs': 0.1609} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.974834] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1196.974981] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4231a83f-ad72-41b2-a2be-f7b26625e238 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.000075] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26/ca63297c-b7bc-45e9-8850-f46050905c26.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.000399] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efc0e72e-08cf-4ffb-96c7-35494fb6f6d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.018378] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1197.018378] env[70020]: value = "task-3619147" [ 1197.018378] env[70020]: _type = "Task" [ 1197.018378] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.026550] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619147, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.214115] env[70020]: DEBUG nova.objects.base [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.214228] env[70020]: DEBUG nova.network.neutron [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1197.266355] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.266355] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquired lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.266355] env[70020]: DEBUG nova.network.neutron [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.368361] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ca1554df-72ae-4e01-9319-438a06d4af95 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.172s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.534328] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619147, 'name': ReconfigVM_Task, 'duration_secs': 0.272324} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.537439] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Reconfigured VM instance instance-00000072 to attach disk [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26/ca63297c-b7bc-45e9-8850-f46050905c26.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1197.539032] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.653341] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0738cf2-6d7c-434a-a76c-3e798dc7a146 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.661396] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ea53c6-8c15-4c43-91c8-4135fefd3c8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.693703] env[70020]: DEBUG nova.network.neutron [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.695375] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb2df1d-e008-4a9b-b1cb-93a4f1c491fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.703540] env[70020]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab34f43-37af-442a-af24-b8f965484be1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.719167] env[70020]: DEBUG nova.compute.provider_tree [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.805944] env[70020]: DEBUG nova.network.neutron [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1197.981019] env[70020]: DEBUG nova.network.neutron [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Updating instance_info_cache with network_info: [{"id": "ca802e98-cbc6-48e4-8381-beef930ad40f", "address": "fa:16:3e:0a:f0:f9", "network": {"id": "399655d9-8420-4a66-a9a0-b6dba2622840", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-482356041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0c0bea4d604c7987011bfa9f00c6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f7a1f33a-9466-4c83-89f6-fd990f47b1ef", "external-id": "nsx-vlan-transportzone-90", "segmentation_id": 90, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca802e98-cb", "ovs_interfaceid": "ca802e98-cbc6-48e4-8381-beef930ad40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.048551] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19ee775-b9d5-4ac3-bb8d-37fec50afc5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.067712] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc93dc6-aa79-48d7-be04-bd90618f6ec0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.085927] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1198.199298] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.224539] env[70020]: DEBUG nova.scheduler.client.report [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.483761] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Releasing lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.484121] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Instance network_info: |[{"id": "ca802e98-cbc6-48e4-8381-beef930ad40f", "address": "fa:16:3e:0a:f0:f9", "network": {"id": "399655d9-8420-4a66-a9a0-b6dba2622840", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-482356041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0c0bea4d604c7987011bfa9f00c6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f7a1f33a-9466-4c83-89f6-fd990f47b1ef", "external-id": "nsx-vlan-transportzone-90", "segmentation_id": 90, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca802e98-cb", "ovs_interfaceid": "ca802e98-cbc6-48e4-8381-beef930ad40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1198.484557] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:f0:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f7a1f33a-9466-4c83-89f6-fd990f47b1ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'ca802e98-cbc6-48e4-8381-beef930ad40f', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1198.492035] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Creating folder: Project (8f0c0bea4d604c7987011bfa9f00c6da). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1198.492321] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2f37a97-7ba0-4262-8ac3-d62829c37abb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.502714] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Created folder: Project (8f0c0bea4d604c7987011bfa9f00c6da) in parent group-v721521. [ 1198.502896] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Creating folder: Instances. Parent ref: group-v721822. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1198.503130] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e707f6af-49b9-49d6-926a-ba7dfbcd5959 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.511561] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Created folder: Instances in parent group-v721822. [ 1198.511786] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.511966] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1198.512510] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95b4d8da-fae7-4cfb-833d-f0a303186f67 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.532907] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1198.532907] env[70020]: value = "task-3619150" [ 1198.532907] env[70020]: _type = "Task" [ 1198.532907] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.540631] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619150, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.627838] env[70020]: DEBUG nova.network.neutron [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Port 2e2d4a2d-9a02-4d7e-b369-98b035a79190 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1198.730219] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.730773] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1198.893881] env[70020]: DEBUG nova.compute.manager [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Received event network-changed-ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.894196] env[70020]: DEBUG nova.compute.manager [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Refreshing instance network info cache due to event network-changed-ca802e98-cbc6-48e4-8381-beef930ad40f. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1198.896988] env[70020]: DEBUG oslo_concurrency.lockutils [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] Acquiring lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.896988] env[70020]: DEBUG oslo_concurrency.lockutils [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] Acquired lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.896988] env[70020]: DEBUG nova.network.neutron [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Refreshing network info cache for port ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1199.047758] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619150, 'name': CreateVM_Task, 'duration_secs': 0.350927} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.048762] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1199.049144] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.049312] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.049651] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1199.050134] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2ac8564-7901-466e-84fb-bcc2216dfe01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.054715] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1199.054715] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52315926-c738-f5a3-26d0-2afd2c9cbb25" [ 1199.054715] env[70020]: _type = "Task" [ 1199.054715] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.062412] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52315926-c738-f5a3-26d0-2afd2c9cbb25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.236876] env[70020]: DEBUG nova.compute.utils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1199.239298] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1199.239298] env[70020]: DEBUG nova.network.neutron [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1199.284271] env[70020]: DEBUG nova.policy [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291265cdc1164603a9011173b1457c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b060ffb3ac4ecd95dcd85d4744dc2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1199.398111] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.398352] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.522625] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.523105] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.523566] env[70020]: DEBUG nova.objects.instance [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'flavor' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.570713] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 
tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52315926-c738-f5a3-26d0-2afd2c9cbb25, 'name': SearchDatastore_Task, 'duration_secs': 0.014694} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.571258] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.571649] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1199.572055] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.572382] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.572724] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.573422] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6436048-9929-46de-b37f-05e40c04c44d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.591287] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.591287] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1199.591287] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1f75a5c-8f2d-4da8-9ac7-617095776417 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.599857] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1199.599857] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52396580-25de-6b7a-04a1-726768a235a0" [ 1199.599857] env[70020]: _type = "Task" [ 1199.599857] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.609668] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52396580-25de-6b7a-04a1-726768a235a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.657603] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.658071] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.658424] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.718936] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e122b2d1-b8fb-40c3-8e47-6ea6aa61156e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.721136] env[70020]: DEBUG nova.network.neutron [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Updated VIF entry in instance network info cache for port ca802e98-cbc6-48e4-8381-beef930ad40f. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.721498] env[70020]: DEBUG nova.network.neutron [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Updating instance_info_cache with network_info: [{"id": "ca802e98-cbc6-48e4-8381-beef930ad40f", "address": "fa:16:3e:0a:f0:f9", "network": {"id": "399655d9-8420-4a66-a9a0-b6dba2622840", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-482356041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0c0bea4d604c7987011bfa9f00c6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f7a1f33a-9466-4c83-89f6-fd990f47b1ef", "external-id": "nsx-vlan-transportzone-90", "segmentation_id": 90, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca802e98-cb", "ovs_interfaceid": "ca802e98-cbc6-48e4-8381-beef930ad40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.739326] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1199.743809] env[70020]: DEBUG nova.network.neutron [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Successfully created port: 9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1199.746564] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1199.901155] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1200.096933] env[70020]: DEBUG nova.objects.instance [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'pci_requests' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.111207] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52396580-25de-6b7a-04a1-726768a235a0, 'name': SearchDatastore_Task, 'duration_secs': 0.025477} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.111973] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-238e88af-d3f3-463e-8523-0693f1efc270 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.117471] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1200.117471] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5277e1c1-c4e5-3e07-54b2-57b55f84e9e8" [ 1200.117471] env[70020]: _type = "Task" [ 1200.117471] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.124954] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5277e1c1-c4e5-3e07-54b2-57b55f84e9e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.224674] env[70020]: DEBUG oslo_concurrency.lockutils [req-220a1f36-77d5-487f-924c-93aed3676caa req-35d31da0-ed1a-4346-9ed5-1a805de58e5f service nova] Releasing lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.245057] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.245361] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f07c625-bd9f-44c1-9385-f006d6302c68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.254398] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1200.254398] env[70020]: value = "task-3619151" [ 1200.254398] env[70020]: _type = "Task" [ 1200.254398] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.262383] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619151, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.423906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.424212] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.425729] env[70020]: INFO nova.compute.claims [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1200.599993] env[70020]: DEBUG nova.objects.base [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1200.600237] env[70020]: DEBUG nova.network.neutron [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1200.627899] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5277e1c1-c4e5-3e07-54b2-57b55f84e9e8, 'name': SearchDatastore_Task, 'duration_secs': 0.010351} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.628172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.628488] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c29d577e-9498-40b1-8e49-caff821cb80a/c29d577e-9498-40b1-8e49-caff821cb80a.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1200.628669] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b803858c-4b71-476c-a012-1d2dbe46947e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.635081] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1200.635081] env[70020]: value = "task-3619152" [ 1200.635081] env[70020]: _type = "Task" [ 1200.635081] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.636409] env[70020]: DEBUG nova.policy [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1200.645576] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619152, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.697918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.697918] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.697918] env[70020]: DEBUG nova.network.neutron [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.756637] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1200.767811] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619151, 'name': PowerOffVM_Task, 'duration_secs': 0.194523} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.768136] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1200.768325] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.784113] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1200.784416] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1200.784581] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1200.784766] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1200.784908] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1200.785065] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1200.785277] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1200.785434] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1200.785594] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1200.785752] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1200.785916] env[70020]: DEBUG nova.virt.hardware [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1200.786804] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d13d1d-5fe6-4ea7-a223-a4acdf62d51f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.794635] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1494563b-72c8-42d7-84c0-f580da3f9898 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.904809] env[70020]: DEBUG nova.network.neutron [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Successfully created port: db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1201.148745] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502843} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.149157] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c29d577e-9498-40b1-8e49-caff821cb80a/c29d577e-9498-40b1-8e49-caff821cb80a.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1201.149450] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1201.149740] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a20b6095-0848-4f9d-b19e-0b719f280c80 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.156696] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1201.156696] env[70020]: value = "task-3619153" [ 1201.156696] env[70020]: _type = "Task" [ 1201.156696] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.168932] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619153, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.277019] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.277582] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.277850] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.278211] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.278748] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.279027] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.279376] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.279668] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.279949] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible 
topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.280245] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.280540] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.289085] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-769ca8f8-7133-48f3-8cdc-08e2a9c4ada6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.305984] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1201.305984] env[70020]: value = "task-3619154" [ 1201.305984] env[70020]: _type = "Task" [ 1201.305984] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.317509] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619154, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.374872] env[70020]: DEBUG nova.network.neutron [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Successfully updated port: 9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1201.468122] env[70020]: DEBUG nova.compute.manager [req-95d1c62b-97ce-408f-97d0-6087bf282ce7 req-a3afae8d-1b26-430e-a460-7f24339e5ce3 service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Received event network-vif-plugged-9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.468380] env[70020]: DEBUG oslo_concurrency.lockutils [req-95d1c62b-97ce-408f-97d0-6087bf282ce7 req-a3afae8d-1b26-430e-a460-7f24339e5ce3 service nova] Acquiring lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.468555] env[70020]: DEBUG oslo_concurrency.lockutils [req-95d1c62b-97ce-408f-97d0-6087bf282ce7 req-a3afae8d-1b26-430e-a460-7f24339e5ce3 service nova] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.468728] env[70020]: DEBUG oslo_concurrency.lockutils [req-95d1c62b-97ce-408f-97d0-6087bf282ce7 req-a3afae8d-1b26-430e-a460-7f24339e5ce3 service nova] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.468853] env[70020]: DEBUG nova.compute.manager [req-95d1c62b-97ce-408f-97d0-6087bf282ce7 req-a3afae8d-1b26-430e-a460-7f24339e5ce3 service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] No waiting events found dispatching network-vif-plugged-9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1201.469169] env[70020]: WARNING nova.compute.manager [req-95d1c62b-97ce-408f-97d0-6087bf282ce7 req-a3afae8d-1b26-430e-a460-7f24339e5ce3 service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Received unexpected event network-vif-plugged-9912a098-b09d-4c69-819f-47a4d7da500b for instance with vm_state building and task_state spawning. 
[ 1201.485666] env[70020]: DEBUG nova.network.neutron [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [{"id": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "address": "fa:16:3e:41:7e:ca", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d4a2d-9a", "ovs_interfaceid": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.599496] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506bd35e-37b5-405e-9b55-6c212cc0a758 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.607210] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebddb2f3-adca-42b8-bb0e-7eac2c830b3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.637452] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7079d5d-f8a6-4768-ae2d-9e20328eb8d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.644644] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782dfd8b-6407-470a-97b8-cd0a963ae37e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.657144] env[70020]: DEBUG nova.compute.provider_tree [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.665244] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070586} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.666008] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1201.666727] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d892aed1-6a54-4579-aa97-55d4db2f5568 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.687695] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] c29d577e-9498-40b1-8e49-caff821cb80a/c29d577e-9498-40b1-8e49-caff821cb80a.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.688132] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bad9760b-14c4-434a-8ab9-2dead139437a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.705942] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1201.705942] env[70020]: value = "task-3619155" [ 1201.705942] env[70020]: _type = "Task" [ 1201.705942] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.712912] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619155, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.816422] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619154, 'name': ReconfigVM_Task, 'duration_secs': 0.210142} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.816733] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.877265] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.877631] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.877801] env[70020]: DEBUG nova.network.neutron [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1201.988076] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.162427] env[70020]: DEBUG nova.scheduler.client.report [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.216284] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619155, 'name': ReconfigVM_Task, 'duration_secs': 0.363747} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.216570] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Reconfigured VM instance instance-00000074 to attach disk [datastore1] c29d577e-9498-40b1-8e49-caff821cb80a/c29d577e-9498-40b1-8e49-caff821cb80a.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1202.217235] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0201e941-8f2f-4a32-8ef2-21c04311b0f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.224256] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1202.224256] env[70020]: value = "task-3619156" [ 1202.224256] env[70020]: _type = "Task" [ 1202.224256] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.231836] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619156, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.323534] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1202.323866] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.323985] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1202.324190] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1202.324332] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1202.324479] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1202.324680] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1202.324830] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1202.324991] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1202.325162] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1202.325388] env[70020]: DEBUG nova.virt.hardware [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1202.331165] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1202.331492] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0c4825c-eb09-4639-af1d-afc38d6bf425 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.351167] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1202.351167] env[70020]: value = "task-3619157" [ 1202.351167] env[70020]: _type = "Task" [ 1202.351167] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.358785] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619157, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.410694] env[70020]: DEBUG nova.network.neutron [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1202.511983] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e947c5d7-f7e4-40e3-aefa-86e5f5172a32 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.532702] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0298b95-8851-478c-8f52-0c3a6aaace52 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.540864] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.543551] env[70020]: DEBUG nova.network.neutron [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Successfully updated port: db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1202.583223] env[70020]: DEBUG nova.network.neutron [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Updating instance_info_cache with network_info: [{"id": "9912a098-b09d-4c69-819f-47a4d7da500b", "address": "fa:16:3e:9b:b1:b1", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9912a098-b0", "ovs_interfaceid": "9912a098-b09d-4c69-819f-47a4d7da500b", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.667490] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.668112] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1202.671579] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.733172] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619156, 'name': Rename_Task, 'duration_secs': 0.192241} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.733410] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1202.733868] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b755c5e6-581e-400f-8197-822fedd26b5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.739928] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1202.739928] env[70020]: value = "task-3619158" [ 1202.739928] env[70020]: _type = "Task" [ 1202.739928] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.746917] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619158, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.862496] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619157, 'name': ReconfigVM_Task, 'duration_secs': 0.189433} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.862798] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1202.863570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0cd83a-c45b-41bd-9f34-c6312bf0bd9e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.885112] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1202.885374] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-575840f4-cb80-4ff0-91ac-269c96b14793 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.901824] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1202.901824] env[70020]: value = "task-3619159" [ 1202.901824] env[70020]: _type = "Task" [ 1202.901824] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.910923] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619159, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.048099] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1203.048171] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c423003-0378-4fc3-a21b-748beb545b3d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.050307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.050472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.050706] env[70020]: DEBUG nova.network.neutron [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1203.057698] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1203.057698] env[70020]: value = "task-3619160" [ 1203.057698] env[70020]: _type = "Task" [ 1203.057698] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.068995] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619160, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.085546] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.086071] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Instance network_info: |[{"id": "9912a098-b09d-4c69-819f-47a4d7da500b", "address": "fa:16:3e:9b:b1:b1", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9912a098-b0", "ovs_interfaceid": "9912a098-b09d-4c69-819f-47a4d7da500b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1203.086708] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:b1:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9912a098-b09d-4c69-819f-47a4d7da500b', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1203.095040] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1203.095306] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1203.095541] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09eb920a-3327-4d5f-a076-320fc58b4588 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.115680] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1203.115680] env[70020]: value = "task-3619161" [ 1203.115680] env[70020]: _type = "Task" [ 1203.115680] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.123434] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619161, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.135160] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.135301] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1203.135493] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.173495] env[70020]: DEBUG nova.compute.utils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1203.175379] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1203.175595] env[70020]: DEBUG nova.network.neutron [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1203.229012] env[70020]: DEBUG nova.policy [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b241829c09b0497f9f30f85c2d2fe85a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11384e127368415d82f2e8a7e985b17e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1203.249660] env[70020]: DEBUG oslo_vmware.api [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619158, 'name': PowerOnVM_Task, 'duration_secs': 0.483593} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.250018] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1203.250183] env[70020]: INFO nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Took 7.13 seconds to spawn the instance on the hypervisor. [ 1203.250295] env[70020]: DEBUG nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1203.251912] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed58465d-e8ac-4677-91a9-577bd1b4d623 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.412876] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619159, 'name': ReconfigVM_Task, 'duration_secs': 0.279239} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.413262] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1203.413550] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.568479] env[70020]: DEBUG oslo_vmware.api [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619160, 'name': PowerOnVM_Task, 'duration_secs': 0.408526} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.571023] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1203.571023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5f179af4-0f53-4f3c-ad0b-bc9f7f0f428c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance 'ca63297c-b7bc-45e9-8850-f46050905c26' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.591980] env[70020]: WARNING nova.network.neutron [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. ignoring it [ 1203.626691] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619161, 'name': CreateVM_Task, 'duration_secs': 0.335142} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.626858] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1203.636109] env[70020]: DEBUG nova.network.neutron [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Successfully created port: b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1203.638709] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.639066] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.639181] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.639253] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1203.640335] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.640488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.640798] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1203.641833] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3557b41b-97c8-4930-b14e-887ca8845a8b {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.645344] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba60bbad-db02-4199-9c1c-6f1f77d16d27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.656148] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dbfee2-f0bd-421c-b576-ec25e26abf0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.660271] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1203.660271] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bd4c07-0f95-7a30-9fbb-2c2f295c029b" [ 1203.660271] env[70020]: _type = "Task" [ 1203.660271] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.675326] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041a6c5e-24a6-4992-b76d-e9f4f5061748 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.682504] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1203.689018] env[70020]: DEBUG nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Received event network-changed-9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1203.689018] env[70020]: DEBUG nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Refreshing instance network info cache due to event network-changed-9912a098-b09d-4c69-819f-47a4d7da500b. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1203.689018] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Acquiring lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.689018] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Acquired lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.689018] env[70020]: DEBUG nova.network.neutron [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Refreshing network info cache for port 9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.689515] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd4c07-0f95-7a30-9fbb-2c2f295c029b, 'name': SearchDatastore_Task, 'duration_secs': 0.014188} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.689981] env[70020]: DEBUG oslo_concurrency.lockutils [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.690200] env[70020]: DEBUG oslo_concurrency.lockutils [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.691350] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.691565] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1203.691785] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.691929] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.692115] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1203.694517] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99cbab20-5cb2-4b42-962b-c7005d596254 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.699106] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cc997d-d265-4489-91e1-8a4366193e06 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.733694] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179200MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1203.733871] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.734104] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.739178] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1203.739367] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1203.740260] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43bf87be-37e7-4495-bea3-c9985dd0b1bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.745792] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1203.745792] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52313201-bad3-1c05-9717-48aa5bcc945a" [ 1203.745792] env[70020]: _type = "Task" [ 1203.745792] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.754321] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52313201-bad3-1c05-9717-48aa5bcc945a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.769830] env[70020]: INFO nova.compute.manager [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Took 16.45 seconds to build instance. [ 1203.922350] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b410bc-9083-4774-9c33-77d9e8747ce4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.943084] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b7f2a4-cd7c-4afc-9a67-54e7b3800e5e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.964174] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.984156] env[70020]: DEBUG nova.network.neutron [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "address": "fa:16:3e:6b:ce:f5", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb65a4ad-ec", "ovs_interfaceid": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.199401] env[70020]: INFO nova.compute.manager [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Detaching volume 9e3399bb-294d-4dc8-865a-5a6fc34ad741 [ 1204.237033] env[70020]: INFO nova.virt.block_device [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Attempting to driver detach volume 9e3399bb-294d-4dc8-865a-5a6fc34ad741 from mountpoint /dev/sdb [ 1204.237033] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1204.237033] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721816', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'name': 'volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9e7bd10b-3a78-48d8-9b66-e3646635be6d', 'attached_at': '', 'detached_at': '', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'serial': '9e3399bb-294d-4dc8-865a-5a6fc34ad741'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1204.238221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09115160-8abf-4474-8136-69a1f784b134 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.273816] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0319069e-58a6-42aa-b0e6-add62ba1bb8f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.276929] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03a84288-c6e8-4eb0-a438-97484d9c4d80 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.550s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.281290] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52313201-bad3-1c05-9717-48aa5bcc945a, 'name': SearchDatastore_Task, 'duration_secs': 0.009922} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.282390] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed60d24-ac96-492b-b61d-f4e5ce4b8f10 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.287343] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47077ca5-d09c-4073-a293-0846142871e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.291021] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1204.291021] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529c8c8c-550f-43a2-51a5-dcddeb380c0c" [ 1204.291021] env[70020]: _type = "Task" [ 1204.291021] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.316670] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e926fbf-905a-4a82-bac9-50e7017ff813 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.322670] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529c8c8c-550f-43a2-51a5-dcddeb380c0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.335469] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] The volume has not been displaced from its original location: [datastore1] volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741/volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1204.340798] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1204.341132] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64f6fe55-aa74-4ab6-ae2b-37c7265e025d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.362212] env[70020]: DEBUG oslo_vmware.api [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1204.362212] env[70020]: value = "task-3619162" [ 1204.362212] env[70020]: _type = "Task" [ 1204.362212] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.371631] env[70020]: DEBUG oslo_vmware.api [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.456957] env[70020]: DEBUG nova.network.neutron [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Updated VIF entry in instance network info cache for port 9912a098-b09d-4c69-819f-47a4d7da500b. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.457598] env[70020]: DEBUG nova.network.neutron [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Updating instance_info_cache with network_info: [{"id": "9912a098-b09d-4c69-819f-47a4d7da500b", "address": "fa:16:3e:9b:b1:b1", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9912a098-b0", "ovs_interfaceid": "9912a098-b09d-4c69-819f-47a4d7da500b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.486964] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.488308] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.488308] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.488669] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb96292-b669-4192-b3d8-b329666f9476 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.506233] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.506684] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.506937] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.507576] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.507576] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.507576] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.507748] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.507807] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.507969] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.508139] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.508382] env[70020]: DEBUG nova.virt.hardware [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.514663] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfiguring VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1204.515712] env[70020]: DEBUG nova.network.neutron [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Port 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1204.517314] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2742a7dc-2841-4782-aa08-912366747a6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.535710] env[70020]: DEBUG oslo_vmware.api [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1204.535710] env[70020]: value = "task-3619163" [ 1204.535710] env[70020]: _type = "Task" [ 1204.535710] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.543670] env[70020]: DEBUG oslo_vmware.api [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619163, 'name': ReconfigVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.700027] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1204.730023] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.730293] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.730449] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.730728] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.731046] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.731120] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.731296] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.731455] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.731618] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.731861] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.732058] env[70020]: DEBUG nova.virt.hardware [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.732997] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751dea85-773e-4927-aacb-b1d195bc23be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.744857] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96247d1e-768e-450b-85c8-ebfaa3311a43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.752126] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance 9962b718-ca31-4f09-91f3-133dd68612ad as it has an incoming, in-progress migration 9227eea3-db08-4f3a-9838-4e33a88b040b. Migration status is migrating {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1204.752481] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance ca63297c-b7bc-45e9-8850-f46050905c26 as it has an incoming, in-progress migration a8690c1d-73a1-4113-9ff2-0115c012fdfd. Migration status is finished {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1204.754682] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating resource usage from migration 9227eea3-db08-4f3a-9838-4e33a88b040b [ 1204.755183] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating resource usage from migration a8690c1d-73a1-4113-9ff2-0115c012fdfd [ 1204.777174] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.777439] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.777650] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.777849] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9e7bd10b-3a78-48d8-9b66-e3646635be6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.778058] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance cc46e905-958e-4dc3-8f83-f8b5680f94de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.778261] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 1b25f8db-457e-4948-b9da-35e2fa5b897e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.778459] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration a8690c1d-73a1-4113-9ff2-0115c012fdfd is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1204.778655] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance ca63297c-b7bc-45e9-8850-f46050905c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.778848] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c29d577e-9498-40b1-8e49-caff821cb80a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.779054] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration 9227eea3-db08-4f3a-9838-4e33a88b040b is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1204.779305] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 9962b718-ca31-4f09-91f3-133dd68612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.779494] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c9a3fb0f-95bf-4b51-ac06-99415acfa9cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.779631] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1204.779952] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1204.780196] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3136MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1204.807417] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529c8c8c-550f-43a2-51a5-dcddeb380c0c, 'name': SearchDatastore_Task, 'duration_secs': 0.029803} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.807823] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.808229] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c9a3fb0f-95bf-4b51-ac06-99415acfa9cb/c9a3fb0f-95bf-4b51-ac06-99415acfa9cb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1204.808613] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-389a2fae-7033-4d39-92fb-09cc9984488c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.817787] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1204.817787] env[70020]: value = "task-3619164" [ 1204.817787] env[70020]: _type = "Task" [ 1204.817787] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.829481] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.873497] env[70020]: DEBUG oslo_vmware.api [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619162, 'name': ReconfigVM_Task, 'duration_secs': 0.368867} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.873703] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1204.881520] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eddf14cc-7a4e-4b31-a27c-5cf47a492308 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.897224] env[70020]: DEBUG oslo_vmware.api [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1204.897224] env[70020]: value = "task-3619165" [ 1204.897224] env[70020]: _type = "Task" [ 1204.897224] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.905805] env[70020]: DEBUG oslo_vmware.api [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619165, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.961049] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Releasing lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.961049] env[70020]: DEBUG nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-vif-plugged-db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.961049] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.961049] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.961484] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1204.961484] env[70020]: DEBUG nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] No waiting events found dispatching network-vif-plugged-db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1204.961700] env[70020]: WARNING nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received unexpected event network-vif-plugged-db65a4ad-ec52-4dd1-bb59-3c000719f018 for instance with vm_state active and task_state None. [ 1204.961700] env[70020]: DEBUG nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-changed-db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.961838] env[70020]: DEBUG nova.compute.manager [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing instance network info cache due to event network-changed-db65a4ad-ec52-4dd1-bb59-3c000719f018. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1204.962011] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.962174] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.962330] env[70020]: DEBUG nova.network.neutron [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing network info cache for port db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.007631] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f852ab8-1435-4f63-b6b9-abfa783f76e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.016323] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d27490-3a43-495f-bc1a-1d9856b847a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.060298] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784fa3db-e02e-483b-b359-e9c9775e06af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.063489] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "c29d577e-9498-40b1-8e49-caff821cb80a" 
by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.063795] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.064073] env[70020]: INFO nova.compute.manager [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Rebooting instance [ 1205.074978] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9443d019-e5d3-463c-874b-197f8bc18acd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.079674] env[70020]: DEBUG oslo_vmware.api [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619163, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.099611] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.312158] env[70020]: DEBUG nova.compute.manager [req-3628add5-6051-4373-a083-8866cf67cb87 req-6d53536a-11c7-41cc-9da3-f548a7c64af5 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.312573] env[70020]: DEBUG oslo_concurrency.lockutils [req-3628add5-6051-4373-a083-8866cf67cb87 req-6d53536a-11c7-41cc-9da3-f548a7c64af5 service nova] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.312573] env[70020]: DEBUG oslo_concurrency.lockutils [req-3628add5-6051-4373-a083-8866cf67cb87 req-6d53536a-11c7-41cc-9da3-f548a7c64af5 service nova] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.312749] env[70020]: DEBUG oslo_concurrency.lockutils [req-3628add5-6051-4373-a083-8866cf67cb87 req-6d53536a-11c7-41cc-9da3-f548a7c64af5 service nova] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.312909] env[70020]: DEBUG nova.compute.manager [req-3628add5-6051-4373-a083-8866cf67cb87 
req-6d53536a-11c7-41cc-9da3-f548a7c64af5 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] No waiting events found dispatching network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1205.313088] env[70020]: WARNING nova.compute.manager [req-3628add5-6051-4373-a083-8866cf67cb87 req-6d53536a-11c7-41cc-9da3-f548a7c64af5 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received unexpected event network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef for instance with vm_state building and task_state spawning. [ 1205.328634] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619164, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.358453] env[70020]: DEBUG nova.network.neutron [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Successfully updated port: b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1205.407611] env[70020]: DEBUG oslo_vmware.api [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619165, 'name': ReconfigVM_Task, 'duration_secs': 0.171121} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.407945] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721816', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'name': 'volume-9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9e7bd10b-3a78-48d8-9b66-e3646635be6d', 'attached_at': '', 'detached_at': '', 'volume_id': '9e3399bb-294d-4dc8-865a-5a6fc34ad741', 'serial': '9e3399bb-294d-4dc8-865a-5a6fc34ad741'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1205.577535] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.577775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.577945] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.578891] env[70020]: DEBUG oslo_vmware.api [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619163, 'name': ReconfigVM_Task, 'duration_secs': 0.695657} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.583763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.583895] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfigured VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1205.588250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.588250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquired lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.588250] env[70020]: DEBUG nova.network.neutron [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1205.602189] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.652553] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.652803] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.652988] env[70020]: DEBUG nova.compute.manager [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Going to confirm migration 6 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1205.677070] env[70020]: DEBUG nova.network.neutron [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updated VIF entry in instance network info cache for port db65a4ad-ec52-4dd1-bb59-3c000719f018. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.677173] env[70020]: DEBUG nova.network.neutron [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "address": "fa:16:3e:6b:ce:f5", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb65a4ad-ec", "ovs_interfaceid": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.828517] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598614} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.828821] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] c9a3fb0f-95bf-4b51-ac06-99415acfa9cb/c9a3fb0f-95bf-4b51-ac06-99415acfa9cb.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1205.829070] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1205.829344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f274b06a-74c7-4d14-a338-5bf2c2222b5a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.835757] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1205.835757] env[70020]: value = "task-3619166" [ 1205.835757] env[70020]: _type = "Task" [ 1205.835757] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.843535] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619166, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.861702] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.861942] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.862188] env[70020]: DEBUG nova.network.neutron [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1205.957675] env[70020]: DEBUG nova.objects.instance [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'flavor' on Instance uuid 9e7bd10b-3a78-48d8-9b66-e3646635be6d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.090473] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ee2fe36a-401c-42f6-957f-53ff13bc885a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.567s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.108764] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1206.109029] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.375s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.179775] env[70020]: DEBUG oslo_concurrency.lockutils [req-edc5a054-6a15-40b9-af32-3fc3e923ef6c req-f69d08ff-c03c-490d-98e5-25b5b9e8ad5d service nova] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.215747] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.215937] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.216128] env[70020]: DEBUG nova.network.neutron [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.216462] env[70020]: DEBUG nova.objects.instance [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'info_cache' on Instance uuid ca63297c-b7bc-45e9-8850-f46050905c26 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.346049] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089551} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.346427] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1206.347093] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7028ab9-caf5-4a04-a016-c5df9f066f90 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.369437] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] c9a3fb0f-95bf-4b51-ac06-99415acfa9cb/c9a3fb0f-95bf-4b51-ac06-99415acfa9cb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.371559] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ab14940-104d-40bc-8316-a02f36ba5ee1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.386060] env[70020]: DEBUG nova.network.neutron [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Updating instance_info_cache with network_info: [{"id": "ca802e98-cbc6-48e4-8381-beef930ad40f", "address": "fa:16:3e:0a:f0:f9", "network": {"id": "399655d9-8420-4a66-a9a0-b6dba2622840", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-482356041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0c0bea4d604c7987011bfa9f00c6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f7a1f33a-9466-4c83-89f6-fd990f47b1ef", "external-id": "nsx-vlan-transportzone-90", "segmentation_id": 90, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca802e98-cb", "ovs_interfaceid": "ca802e98-cbc6-48e4-8381-beef930ad40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.393630] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1206.393630] env[70020]: value = "task-3619167" [ 1206.393630] env[70020]: _type = "Task" [ 1206.393630] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.404276] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619167, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.418172] env[70020]: DEBUG nova.network.neutron [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1206.543235] env[70020]: DEBUG nova.network.neutron [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.616450] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.616671] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.616850] env[70020]: DEBUG nova.network.neutron [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.889251] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Releasing lock "refresh_cache-c29d577e-9498-40b1-8e49-caff821cb80a" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.904395] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619167, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.964547] env[70020]: DEBUG oslo_concurrency.lockutils [None req-607bac29-7312-4870-824c-98e19e9b0c3e tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.274s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.045780] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.046115] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance network_info: |[{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1207.046604] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:73:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'af454577-0e89-41a3-a9f2-f39716f62fd5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3b2c85d-9fe6-403f-bc6d-d003d2a06aef', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.054827] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating folder: Project (11384e127368415d82f2e8a7e985b17e). Parent ref: group-v721521. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1207.054827] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ed1bdd8-ae62-47ee-8149-6c33c54f8fdc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.066091] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created folder: Project (11384e127368415d82f2e8a7e985b17e) in parent group-v721521. [ 1207.066091] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating folder: Instances. Parent ref: group-v721826. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1207.066340] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94961e26-644f-4c61-b1be-1ab271227b53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.074732] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created folder: Instances in parent group-v721826. [ 1207.074960] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1207.075165] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1207.075383] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40e57e8a-5162-4877-a788-a49a0ff89b54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.093437] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.093437] env[70020]: value = "task-3619170" [ 1207.093437] env[70020]: _type = "Task" [ 1207.093437] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.100912] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619170, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.104543] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.104758] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.104917] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.105189] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.334941] env[70020]: DEBUG nova.network.neutron [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.342484] env[70020]: DEBUG nova.compute.manager [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1207.342675] env[70020]: DEBUG nova.compute.manager [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing instance network info cache due to event 
network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1207.342886] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.343039] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.343205] env[70020]: DEBUG nova.network.neutron [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1207.393588] env[70020]: DEBUG nova.compute.manager [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1207.394569] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169caf67-8a44-48c4-b139-1629878b0f76 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.405726] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619167, 'name': ReconfigVM_Task, 'duration_secs': 0.933} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.407371] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Reconfigured VM instance instance-00000075 to attach disk [datastore1] c9a3fb0f-95bf-4b51-ac06-99415acfa9cb/c9a3fb0f-95bf-4b51-ac06-99415acfa9cb.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.410837] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e873cbba-803b-426f-b92f-c4908c266079 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.419459] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1207.419459] env[70020]: value = "task-3619171" [ 1207.419459] env[70020]: _type = "Task" [ 1207.419459] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.430621] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619171, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.603862] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619170, 'name': CreateVM_Task, 'duration_secs': 0.500418} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.604039] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1207.604729] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.604885] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.605259] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1207.605520] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27af073e-ca2c-4d71-a001-8ddb21176fae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.610316] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1207.610316] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d1c5af-6aa3-bfd8-a4c1-16f2134d484d" [ 1207.610316] env[70020]: _type = "Task" [ 1207.610316] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.620276] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d1c5af-6aa3-bfd8-a4c1-16f2134d484d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.638870] env[70020]: DEBUG nova.network.neutron [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [{"id": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "address": "fa:16:3e:41:7e:ca", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2d4a2d-9a", "ovs_interfaceid": "2e2d4a2d-9a02-4d7e-b369-98b035a79190", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.837996] env[70020]: DEBUG oslo_concurrency.lockutils [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.929240] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619171, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.098062] env[70020]: DEBUG nova.network.neutron [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updated VIF entry in instance network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1208.098462] env[70020]: DEBUG nova.network.neutron [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.121400] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d1c5af-6aa3-bfd8-a4c1-16f2134d484d, 'name': SearchDatastore_Task, 'duration_secs': 0.045665} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.121400] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.121583] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1208.121812] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.121957] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.122146] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.122396] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d650017c-8cb5-4745-a55c-e63c773cf380 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.131957] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.132151] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1208.132852] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e4d5273-cea7-453c-af79-feb1ed3aa8b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.137965] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1208.137965] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52967918-1829-f853-31ca-8b44a32caae5" [ 1208.137965] env[70020]: _type = "Task" [ 1208.137965] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.142834] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-ca63297c-b7bc-45e9-8850-f46050905c26" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.143118] env[70020]: DEBUG nova.objects.instance [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'migration_context' on Instance uuid ca63297c-b7bc-45e9-8850-f46050905c26 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.149867] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52967918-1829-f853-31ca-8b44a32caae5, 'name': SearchDatastore_Task, 'duration_secs': 0.009653} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.151207] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd55b575-d351-40ae-bdce-91f32e465b70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.156607] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1208.156607] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c65867-ae63-c55e-8da3-4a498a5d888f" [ 1208.156607] env[70020]: _type = "Task" [ 1208.156607] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.164393] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c65867-ae63-c55e-8da3-4a498a5d888f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.197042] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.197336] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.197568] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.197804] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.198090] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.200202] env[70020]: INFO nova.compute.manager [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Terminating instance [ 1208.229077] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-36da126c-3f5a-43c8-98eb-774da4ecb681" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.229396] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock 
"interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-36da126c-3f5a-43c8-98eb-774da4ecb681" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.229854] env[70020]: DEBUG nova.objects.instance [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'flavor' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.368031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dacb0f-6a9d-42c2-b5f7-e5a8fa1461e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.389927] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786bcfdd-b83e-4ae5-8133-0f8804e8a0f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.397436] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1208.420062] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5b2dc6-2461-414b-8b8f-d12198c4ad5b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.431767] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619171, 'name': Rename_Task, 'duration_secs': 0.991703} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.434651] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.435076] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Doing hard reboot of VM {{(pid=70020) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1208.435306] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75894a11-86dc-4859-ac13-be39c5368440 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.436867] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1fe7c922-ef52-46c7-9fa4-a63548f92a2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.442700] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1208.442700] env[70020]: value = "task-3619172" [ 1208.442700] env[70020]: _type = "Task" [ 1208.442700] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.443866] env[70020]: DEBUG oslo_vmware.api [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1208.443866] env[70020]: value = "task-3619173" [ 1208.443866] env[70020]: _type = "Task" [ 1208.443866] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.455349] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.458600] env[70020]: DEBUG oslo_vmware.api [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619173, 'name': ResetVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.601116] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e154f6e-2cd7-4323-9eb6-c54c34f73e4d req-996e246e-ee1d-43d5-b454-415b7449321d service nova] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.646487] env[70020]: DEBUG nova.objects.base [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1208.647368] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de33729-6090-4694-b8ff-6b2edd3a9355 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.669360] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a352387-9c0b-40df-ba62-9875648fc297 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.677901] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c65867-ae63-c55e-8da3-4a498a5d888f, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.679170] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.679442] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1208.679753] env[70020]: DEBUG oslo_vmware.api [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1208.679753] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523d1bcb-809b-c33d-1511-1bbdba8b3f61" [ 1208.679753] env[70020]: _type = "Task" [ 1208.679753] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.679932] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26a588b5-dc57-4c20-9205-80cd84a8080b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.689881] env[70020]: DEBUG oslo_vmware.api [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523d1bcb-809b-c33d-1511-1bbdba8b3f61, 'name': SearchDatastore_Task, 'duration_secs': 0.007866} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.691033] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.691269] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.692630] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1208.692630] env[70020]: value = "task-3619174" [ 1208.692630] env[70020]: _type = "Task" [ 1208.692630] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.700157] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.704067] env[70020]: DEBUG nova.compute.manager [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1208.704291] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.705291] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e4305b-22a7-4165-9845-8e1b6c9bba04 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.711997] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1208.712238] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1fa5cb0-f351-4630-b74e-6be729515684 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.717638] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1208.717638] env[70020]: value = "task-3619175" [ 1208.717638] env[70020]: _type = "Task" [ 1208.717638] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.724993] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619175, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.817865] env[70020]: DEBUG nova.objects.instance [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'pci_requests' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.904499] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.904816] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a366270-b6ec-4fcf-8e42-22f4af5d929e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.912116] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1208.912116] env[70020]: value = "task-3619176" [ 1208.912116] env[70020]: _type = "Task" [ 1208.912116] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.920783] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619176, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.966770] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619172, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.970353] env[70020]: DEBUG oslo_vmware.api [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619173, 'name': ResetVM_Task, 'duration_secs': 0.099867} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.970639] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Did hard reboot of VM {{(pid=70020) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1208.970829] env[70020]: DEBUG nova.compute.manager [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1208.971647] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fff396-e4e2-403c-aa45-f0f3fcc3d149 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.134837] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.205471] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481057} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.205732] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1209.205949] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1209.206214] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8253ca0-fe66-4423-8d56-33e2986f136b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.214898] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1209.214898] env[70020]: value = "task-3619177" [ 1209.214898] env[70020]: _type = "Task" [ 1209.214898] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.224279] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619177, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.231831] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.322243] env[70020]: DEBUG nova.objects.base [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1209.322512] env[70020]: DEBUG nova.network.neutron [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1209.369146] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e28d51b-fc89-49e0-8fe5-2b7e3d2833c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.377784] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e2f29d-86e5-4659-a650-e180c5186f8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.409891] env[70020]: DEBUG nova.policy [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1209.412352] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f572dd9b-b29a-46be-a13d-9ecdb3a17999 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.431414] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ad3582-ec3e-4a7b-bef7-13a0076ec411 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.435625] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619176, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.449544] env[70020]: DEBUG nova.compute.provider_tree [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1209.459392] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619172, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.484577] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b1ffb680-c696-4a86-a070-b4a566606f76 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.421s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.731153] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619175, 'name': PowerOffVM_Task, 'duration_secs': 0.866244} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.735129] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1209.735379] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1209.735715] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080965} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.735965] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87bfc46a-2088-4d83-abf3-750309e063e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.737829] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1209.738840] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f297113-e759-4e08-8ff7-c4b13737822b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.771602] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1209.772423] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-645cc75a-a0d4-4d55-92a9-e278e0f91159 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.799915] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1209.799915] env[70020]: value = "task-3619179" [ 1209.799915] env[70020]: _type = "Task" [ 1209.799915] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.801235] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1209.801464] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1209.801665] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleting the datastore file [datastore2] 9e7bd10b-3a78-48d8-9b66-e3646635be6d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.804655] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a3e9909-11fe-469f-ba52-3950f09e3313 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.811614] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619179, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.812866] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1209.812866] env[70020]: value = "task-3619180" [ 1209.812866] env[70020]: _type = "Task" [ 1209.812866] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.820721] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619180, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.925207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "c29d577e-9498-40b1-8e49-caff821cb80a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.925207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.925429] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "c29d577e-9498-40b1-8e49-caff821cb80a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.925624] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.925793] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.928162] env[70020]: DEBUG oslo_vmware.api [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619176, 'name': PowerOnVM_Task, 'duration_secs': 0.589944} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.928668] env[70020]: INFO nova.compute.manager [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Terminating instance [ 1209.930085] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.930298] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-16d585f0-4ca9-4633-bf2f-b1690654328a tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance '9962b718-ca31-4f09-91f3-133dd68612ad' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1209.961738] env[70020]: DEBUG oslo_vmware.api [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619172, 'name': PowerOnVM_Task, 'duration_secs': 1.451762} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.961738] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.961981] env[70020]: INFO nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Took 9.20 seconds to spawn the instance on the hypervisor. [ 1209.961981] env[70020]: DEBUG nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1209.963143] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa60bec6-fbbf-483a-9dfe-291b48e8a7fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.976658] env[70020]: ERROR nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [req-8314448b-6f0d-4f8a-ab09-081d0f4aa7ed] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8314448b-6f0d-4f8a-ab09-081d0f4aa7ed"}]} [ 1209.996886] env[70020]: DEBUG nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1210.011949] env[70020]: DEBUG nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1210.012196] env[70020]: DEBUG nova.compute.provider_tree [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1210.023948] env[70020]: DEBUG nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1210.042608] env[70020]: DEBUG nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1210.204085] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e340b3-3b65-48c7-a0f3-20686e8693f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.211465] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8f934c3-09ce-4bb0-9982-795de983f478 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.242013] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0df358-e769-4d45-a02c-2d053bc53853 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.249515] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9931d506-ae6c-4445-9325-4ee5efa1e44e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.262795] env[70020]: DEBUG nova.compute.provider_tree [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1210.310792] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.321233] env[70020]: DEBUG oslo_vmware.api [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.461052} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.321476] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.321662] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.321842] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.322016] env[70020]: INFO nova.compute.manager [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1210.322257] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1210.322446] env[70020]: DEBUG nova.compute.manager [-] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1210.322542] env[70020]: DEBUG nova.network.neutron [-] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1210.439315] env[70020]: DEBUG nova.compute.manager [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1210.439315] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1210.440365] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32da93d-9410-4fb4-ae90-4e561b468659 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.448101] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1210.448376] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52ebe32f-7b42-460e-871b-f5986896ac81 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.456024] env[70020]: DEBUG oslo_vmware.api [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1210.456024] env[70020]: value = "task-3619181" [ 1210.456024] env[70020]: _type = "Task" [ 1210.456024] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.468477] env[70020]: DEBUG oslo_vmware.api [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.482072] env[70020]: INFO nova.compute.manager [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Took 18.71 seconds to build instance. 
[ 1210.795503] env[70020]: DEBUG nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1210.795799] env[70020]: DEBUG nova.compute.provider_tree [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 176 to 177 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1210.795984] env[70020]: DEBUG nova.compute.provider_tree [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1210.810536] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619179, 'name': ReconfigVM_Task, 'duration_secs': 0.861748} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.810823] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1210.811470] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b31172f0-9b2b-4d0d-8c19-477374349cd0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.819558] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1210.819558] env[70020]: value = "task-3619182" [ 1210.819558] env[70020]: _type = "Task" [ 1210.819558] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.828774] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619182, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.857412] env[70020]: DEBUG nova.compute.manager [req-d924ca63-2634-4bc1-bc9c-f52f647d972d req-4417195d-c89b-4a88-bf1b-5a1c648d5e1f service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Received event network-vif-deleted-2573d470-4c75-40c7-9e9b-6130f5e14092 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1210.858064] env[70020]: INFO nova.compute.manager [req-d924ca63-2634-4bc1-bc9c-f52f647d972d req-4417195d-c89b-4a88-bf1b-5a1c648d5e1f service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Neutron deleted interface 2573d470-4c75-40c7-9e9b-6130f5e14092; detaching it from the instance and deleting it from the info cache [ 1210.859267] env[70020]: DEBUG nova.network.neutron [req-d924ca63-2634-4bc1-bc9c-f52f647d972d req-4417195d-c89b-4a88-bf1b-5a1c648d5e1f service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.966691] env[70020]: DEBUG oslo_vmware.api [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619181, 'name': PowerOffVM_Task, 'duration_secs': 0.492945} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.969943] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.969943] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.969943] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc43220c-b462-4c50-8037-84d500c15fd6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.988027] env[70020]: DEBUG oslo_concurrency.lockutils [None req-91e53cd4-ec25-4fba-9468-70b3c8678424 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.215s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.033495] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1211.034075] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1211.037104] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Deleting the datastore file [datastore1] c29d577e-9498-40b1-8e49-caff821cb80a {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1211.037104] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccab5763-692b-482d-99de-fd4269e54287 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.041102] env[70020]: DEBUG oslo_vmware.api [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for the task: (returnval){ [ 1211.041102] env[70020]: value = "task-3619184" [ 1211.041102] env[70020]: _type = "Task" [ 1211.041102] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.053021] env[70020]: DEBUG oslo_vmware.api [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.127061] env[70020]: DEBUG nova.network.neutron [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Successfully updated port: 36da126c-3f5a-43c8-98eb-774da4ecb681 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1211.265037] env[70020]: DEBUG nova.network.neutron [-] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.329880] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619182, 'name': Rename_Task, 'duration_secs': 0.173272} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.330172] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.330420] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9d23b8c-85e8-4e8d-8c3e-7188d4926484 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.336423] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1211.336423] env[70020]: value = "task-3619185" [ 1211.336423] env[70020]: _type = "Task" [ 1211.336423] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.344694] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619185, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.361832] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2770547-a972-40d1-9eae-2d4134efc819 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.371169] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb1fdd7-54d5-4bc5-a27e-04fe0355d976 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.389377] env[70020]: DEBUG nova.compute.manager [req-e81103cd-23c4-43c7-88af-25843cbce163 req-9e44ab10-7580-4bd6-a95f-c41836b4bb53 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-vif-plugged-36da126c-3f5a-43c8-98eb-774da4ecb681 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.389646] env[70020]: DEBUG oslo_concurrency.lockutils [req-e81103cd-23c4-43c7-88af-25843cbce163 req-9e44ab10-7580-4bd6-a95f-c41836b4bb53 service nova] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.391982] env[70020]: DEBUG oslo_concurrency.lockutils [req-e81103cd-23c4-43c7-88af-25843cbce163 req-9e44ab10-7580-4bd6-a95f-c41836b4bb53 service nova] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.392643] env[70020]: DEBUG oslo_concurrency.lockutils [req-e81103cd-23c4-43c7-88af-25843cbce163 req-9e44ab10-7580-4bd6-a95f-c41836b4bb53 service nova] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.393463] env[70020]: DEBUG nova.compute.manager [req-e81103cd-23c4-43c7-88af-25843cbce163 req-9e44ab10-7580-4bd6-a95f-c41836b4bb53 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] No waiting events found dispatching network-vif-plugged-36da126c-3f5a-43c8-98eb-774da4ecb681 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1211.394102] env[70020]: WARNING nova.compute.manager [req-e81103cd-23c4-43c7-88af-25843cbce163 req-9e44ab10-7580-4bd6-a95f-c41836b4bb53 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received unexpected event network-vif-plugged-36da126c-3f5a-43c8-98eb-774da4ecb681 for instance with vm_state active and task_state None. [ 1211.407026] env[70020]: DEBUG nova.compute.manager [req-d924ca63-2634-4bc1-bc9c-f52f647d972d req-4417195d-c89b-4a88-bf1b-5a1c648d5e1f service nova] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Detach interface failed, port_id=2573d470-4c75-40c7-9e9b-6130f5e14092, reason: Instance 9e7bd10b-3a78-48d8-9b66-e3646635be6d could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1211.551816] env[70020]: DEBUG oslo_vmware.api [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Task: {'id': task-3619184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.4154} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.552281] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.552281] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1211.552450] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1211.552622] env[70020]: INFO nova.compute.manager [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1211.552864] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.553072] env[70020]: DEBUG nova.compute.manager [-] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1211.553177] env[70020]: DEBUG nova.network.neutron [-] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1211.632365] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.632611] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.632838] env[70020]: DEBUG nova.network.neutron [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1211.768585] env[70020]: INFO nova.compute.manager [-] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Took 1.45 seconds to deallocate network for instance. [ 1211.809366] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.117s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.847522] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619185, 'name': PowerOnVM_Task} progress is 90%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.171271] env[70020]: WARNING nova.network.neutron [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. ignoring it [ 1212.171487] env[70020]: WARNING nova.network.neutron [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. 
ignoring it [ 1212.277154] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.277465] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.277697] env[70020]: DEBUG nova.objects.instance [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'resources' on Instance uuid 9e7bd10b-3a78-48d8-9b66-e3646635be6d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.329027] env[70020]: DEBUG nova.network.neutron [-] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.352775] env[70020]: DEBUG oslo_vmware.api [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619185, 'name': PowerOnVM_Task, 'duration_secs': 0.585802} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.353032] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1212.353243] env[70020]: INFO nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Took 7.65 seconds to spawn the instance on the hypervisor. 
[ 1212.353420] env[70020]: DEBUG nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1212.354240] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198f6467-36ce-442e-b32d-e624c9ce9875 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.389641] env[70020]: INFO nova.scheduler.client.report [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocation for migration a8690c1d-73a1-4113-9ff2-0115c012fdfd [ 1212.434253] env[70020]: DEBUG nova.network.neutron [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Port 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1212.434253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.434253] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.434609] env[70020]: DEBUG nova.network.neutron [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1212.711889] env[70020]: DEBUG nova.network.neutron [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "address": "fa:16:3e:6b:ce:f5", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb65a4ad-ec", "ovs_interfaceid": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "36da126c-3f5a-43c8-98eb-774da4ecb681", "address": "fa:16:3e:7c:92:c8", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36da126c-3f", "ovs_interfaceid": "36da126c-3f5a-43c8-98eb-774da4ecb681", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.833048] env[70020]: INFO nova.compute.manager [-] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Took 1.28 seconds to deallocate network for instance. [ 1212.875797] env[70020]: INFO nova.compute.manager [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Took 12.47 seconds to build instance. 
[ 1212.897839] env[70020]: DEBUG oslo_concurrency.lockutils [None req-167f6f67-32fb-41df-a049-5b5027aa1e73 tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.245s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.972570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7641402d-6999-43c7-9e22-8132282ac095 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.980909] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9712e39d-bd5e-410a-a163-315c9b752e71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.014390] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ce3228-f99d-4080-9ced-625a0ca127f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.022483] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bda021-5cf6-4aca-b71d-79aaf9aa8889 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.036954] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1213.215551] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.216273] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.216524] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.217411] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-766c95e0-83ca-40f6-998d-b62b3703ca5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.237340] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1213.237582] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.237737] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1213.237914] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.238067] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1213.238247] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1213.238468] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1213.238627] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1213.238785] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1213.238942] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1213.239123] env[70020]: DEBUG nova.virt.hardware [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1213.245460] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfiguring VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1213.249046] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74c526d6-668c-49f6-a56f-698489a66918 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.269834] env[70020]: DEBUG oslo_vmware.api [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1213.269834] env[70020]: value = "task-3619187" [ 1213.269834] env[70020]: _type = "Task" [ 1213.269834] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.278567] env[70020]: DEBUG oslo_vmware.api [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619187, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.338544] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.377962] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf55471b-d562-41a2-9c77-54d11dd84209 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.979s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.426560] env[70020]: DEBUG nova.network.neutron [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.561600] env[70020]: ERROR nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [req-3a84f668-812c-4335-aff3-c7edb07ac164] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3a84f668-812c-4335-aff3-c7edb07ac164"}]} [ 1213.577720] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1213.593668] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1213.593974] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1213.605353] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1213.625290] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1213.765208] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.765471] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.765682] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.765868] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.766043] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.775253] env[70020]: INFO nova.compute.manager [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Terminating instance [ 1213.789747] env[70020]: DEBUG oslo_vmware.api [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.806680] env[70020]: DEBUG nova.compute.manager [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-changed-36da126c-3f5a-43c8-98eb-774da4ecb681 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1213.807014] env[70020]: DEBUG nova.compute.manager [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing instance network info cache due to event network-changed-36da126c-3f5a-43c8-98eb-774da4ecb681. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1213.807128] env[70020]: DEBUG oslo_concurrency.lockutils [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.807261] env[70020]: DEBUG oslo_concurrency.lockutils [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.807526] env[70020]: DEBUG nova.network.neutron [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Refreshing network info cache for port 36da126c-3f5a-43c8-98eb-774da4ecb681 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1213.818628] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea478722-4367-4063-96db-df973ada98a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.829071] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff50207-26c2-49ae-b64e-574d5e4fb457 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.864451] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b3bb67-e024-4339-9b98-5269abddc493 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.873801] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2835da-e052-43b0-865e-5539e86d0e24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.891821] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1213.929727] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.960717] env[70020]: DEBUG nova.compute.manager [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event 
network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1213.960717] env[70020]: DEBUG nova.compute.manager [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing instance network info cache due to event network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1213.960898] env[70020]: DEBUG oslo_concurrency.lockutils [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.960995] env[70020]: DEBUG oslo_concurrency.lockutils [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.961204] env[70020]: DEBUG nova.network.neutron [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1214.280038] env[70020]: DEBUG oslo_vmware.api [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619187, 'name': ReconfigVM_Task, 'duration_secs': 0.62235} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.280579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.280877] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfigured VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1214.283968] env[70020]: DEBUG nova.compute.manager [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1214.284181] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1214.285144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b12fff-2728-4082-8d91-55259dc6026a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.292182] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1214.292403] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6213a3e2-4f2f-41ed-8562-0f15fb640f56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.297655] env[70020]: DEBUG oslo_vmware.api [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1214.297655] env[70020]: value = "task-3619188" [ 1214.297655] env[70020]: _type = "Task" [ 1214.297655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.306809] env[70020]: DEBUG oslo_vmware.api [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619188, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.403817] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "115a8a58-d3ce-4778-9bc7-c75d0007b499" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.404055] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.414611] env[70020]: ERROR nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [req-3228ec11-65f5-4a55-9e6c-0cb488facb65] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3228ec11-65f5-4a55-9e6c-0cb488facb65"}]} [ 1214.431728] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1214.434871] env[70020]: DEBUG nova.compute.manager [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=70020) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1214.435047] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.447975] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating ProviderTree inventory for 
provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1214.448222] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1214.461316] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1214.484414] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1214.603161] env[70020]: DEBUG nova.network.neutron [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updated VIF entry in instance network info cache for port 36da126c-3f5a-43c8-98eb-774da4ecb681. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1214.603712] env[70020]: DEBUG nova.network.neutron [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "address": "fa:16:3e:6b:ce:f5", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb65a4ad-ec", "ovs_interfaceid": "db65a4ad-ec52-4dd1-bb59-3c000719f018", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "36da126c-3f5a-43c8-98eb-774da4ecb681", "address": "fa:16:3e:7c:92:c8", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36da126c-3f", "ovs_interfaceid": "36da126c-3f5a-43c8-98eb-774da4ecb681", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.733747] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2657729d-117c-4707-ae47-aa9f58897fc4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.742071] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e9e3da-bb52-4e80-8c16-fa45f6cd8d7e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.776591] env[70020]: DEBUG nova.network.neutron [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updated VIF entry in instance network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1214.777016] env[70020]: DEBUG nova.network.neutron [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.778954] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80a0594-3925-47de-9e32-8672880ecfe9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.786008] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c3422626-10b2-4fac-8628-df6d6c60a453 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock 
"interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-36da126c-3f5a-43c8-98eb-774da4ecb681" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.557s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.788118] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3d9e94-165f-4d7d-86bd-1ae47114fd89 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.803444] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1214.813550] env[70020]: DEBUG oslo_vmware.api [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619188, 'name': PowerOffVM_Task, 'duration_secs': 0.332992} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.813842] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.813981] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.814268] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9844218d-a31b-4911-9ca6-e2db9d091954 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.907170] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1215.107862] env[70020]: DEBUG oslo_concurrency.lockutils [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.108118] env[70020]: DEBUG nova.compute.manager [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Received event network-changed-9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.108295] env[70020]: DEBUG nova.compute.manager [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Refreshing instance network info cache due to event network-changed-9912a098-b09d-4c69-819f-47a4d7da500b. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1215.108511] env[70020]: DEBUG oslo_concurrency.lockutils [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] Acquiring lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.108655] env[70020]: DEBUG oslo_concurrency.lockutils [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] Acquired lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.108813] env[70020]: DEBUG nova.network.neutron [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Refreshing network info cache for port 9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.282242] env[70020]: DEBUG oslo_concurrency.lockutils [req-e8007645-05f5-404b-9a62-a56db2c5f9de req-590986ca-79bf-4683-ba60-c64199ec187f service nova] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.340987] env[70020]: DEBUG nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 179 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1215.341295] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating 
resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 179 to 180 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1215.341487] env[70020]: DEBUG nova.compute.provider_tree [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1215.433057] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.444974] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1215.445279] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1215.445497] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file [datastore1] ca63297c-b7bc-45e9-8850-f46050905c26 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1215.445836] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48332566-2757-40a0-b83d-e6349038df4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.455108] env[70020]: DEBUG oslo_vmware.api [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1215.455108] env[70020]: value = "task-3619190" [ 1215.455108] env[70020]: _type = "Task" [ 1215.455108] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.463910] env[70020]: DEBUG oslo_vmware.api [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619190, 'name': DeleteDatastoreFile_Task} progress is 0%. 
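The inventory payloads recorded for provider ee72c483-d9d9-4e62-8f73-e9f24668500d above follow the Placement inventory schema: per resource class they carry total, reserved, min_unit, max_unit, step_size and allocation_ratio, and Placement accepts allocations up to (total - reserved) * allocation_ratio. A minimal sketch (illustrative only, not Nova's ProviderTree code) that derives that effective capacity from the dict shown in the log:

    # Illustrative only: mirrors the inventory dicts logged above; this is
    # not Nova's ProviderTree implementation.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Placement allows allocations up to (total - reserved) * allocation_ratio
        # for each resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}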
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.820908] env[70020]: DEBUG nova.network.neutron [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Updated VIF entry in instance network info cache for port 9912a098-b09d-4c69-819f-47a4d7da500b. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1215.821324] env[70020]: DEBUG nova.network.neutron [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Updating instance_info_cache with network_info: [{"id": "9912a098-b09d-4c69-819f-47a4d7da500b", "address": "fa:16:3e:9b:b1:b1", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9912a098-b0", "ovs_interfaceid": "9912a098-b09d-4c69-819f-47a4d7da500b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.846622] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.569s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.848971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.511s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.849296] env[70020]: DEBUG nova.objects.instance [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lazy-loading 'resources' on Instance uuid c29d577e-9498-40b1-8e49-caff821cb80a {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.865544] env[70020]: INFO nova.scheduler.client.report [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleted allocations for instance 9e7bd10b-3a78-48d8-9b66-e3646635be6d [ 1215.965839] env[70020]: 
DEBUG oslo_vmware.api [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.47447} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.966168] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.966374] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.967941] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.967941] env[70020]: INFO nova.compute.manager [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1215.967941] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
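The instance_info_cache entry logged for port 9912a098-b09d-4c69-819f-47a4d7da500b above is a list of VIF dicts: port id, MAC address, a network with subnets and fixed/floating IPs, and the OVS binding details. A small sketch that summarizes such a structure; the field names are taken from the logged JSON, while the helper itself is hypothetical:

    # Hypothetical helper: walks a network_info list shaped like the cache
    # entry in the log and pulls out the fields usually needed for debugging.
    def summarize_vifs(network_info):
        summary = []
        for vif in network_info:
            fixed_ips = [ip['address']
                         for subnet in vif['network']['subnets']
                         for ip in subnet['ips']
                         if ip['type'] == 'fixed']
            summary.append({
                'port_id': vif['id'],
                'mac': vif['address'],
                'devname': vif.get('devname'),
                'fixed_ips': fixed_ips,
            })
        return summary

    # Applied to the cached entry above this yields one VIF: port
    # 9912a098-b09d-4c69-819f-47a4d7da500b, MAC fa:16:3e:9b:b1:b1,
    # devname tap9912a098-b0, fixed IP 192.168.128.10.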
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1215.968142] env[70020]: DEBUG nova.compute.manager [-] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1215.968142] env[70020]: DEBUG nova.network.neutron [-] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1216.221397] env[70020]: DEBUG nova.compute.manager [req-b3796cfd-cc8a-49a0-8cfa-5476b83ad872 req-f8b8f28c-db71-450f-a31d-a95195274e7b service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Received event network-vif-deleted-2e2d4a2d-9a02-4d7e-b369-98b035a79190 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1216.221634] env[70020]: INFO nova.compute.manager [req-b3796cfd-cc8a-49a0-8cfa-5476b83ad872 req-f8b8f28c-db71-450f-a31d-a95195274e7b service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Neutron deleted interface 2e2d4a2d-9a02-4d7e-b369-98b035a79190; detaching it from the instance and deleting it from the info cache [ 1216.221778] env[70020]: DEBUG nova.network.neutron [req-b3796cfd-cc8a-49a0-8cfa-5476b83ad872 req-f8b8f28c-db71-450f-a31d-a95195274e7b service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.325344] env[70020]: DEBUG oslo_concurrency.lockutils [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] Releasing lock "refresh_cache-c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.325661] env[70020]: DEBUG nova.compute.manager [req-1b43248e-1a91-479b-8fd2-002f4f97cefb req-0b924be6-c3ff-41de-b104-f4029391b18f service nova] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Received event network-vif-deleted-ca802e98-cbc6-48e4-8381-beef930ad40f {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1216.378279] env[70020]: DEBUG oslo_concurrency.lockutils [None req-03ecc54f-cb3d-4f67-8988-e666e0d900be tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "9e7bd10b-3a78-48d8-9b66-e3646635be6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.181s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.513340] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f781c7-42c4-4d32-bdcf-4fc78594c287 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.520270] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7249d9b7-e87f-4cf7-b734-0ef5399c1bc2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.550294] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce22e04-f6e8-4f5c-b0f4-bd383830a0c0 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.557126] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a7896d-5397-4ea7-8787-cf2c4aca9a46 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.560906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-db65a4ad-ec52-4dd1-bb59-3c000719f018" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.561138] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-db65a4ad-ec52-4dd1-bb59-3c000719f018" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.572900] env[70020]: DEBUG nova.compute.provider_tree [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.704494] env[70020]: DEBUG nova.network.neutron [-] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.725141] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0591e656-6e1e-482d-8db8-93c72f5d5837 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.733926] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da299dd-f3d0-4fc2-9e12-653b69de5d3e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.762564] env[70020]: DEBUG nova.compute.manager [req-b3796cfd-cc8a-49a0-8cfa-5476b83ad872 req-f8b8f28c-db71-450f-a31d-a95195274e7b service nova] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Detach interface failed, port_id=2e2d4a2d-9a02-4d7e-b369-98b035a79190, reason: Instance ca63297c-b7bc-45e9-8850-f46050905c26 could not be found. 
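The "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" lines throughout this log are emitted by oslo.concurrency's lockutils wrappers around named locks such as "compute_resources" and the per-port "interface-<instance>-<port>" lock just above. A minimal usage sketch of that pattern, assuming plain oslo.concurrency usage rather than Nova's actual code:

    from oslo_concurrency import lockutils

    # Assumed usage sketch: serialize interface detach per (instance, port)
    # with a named lock like the one in the log. The lockutils wrapper logs
    # the acquire/release lines, including wait and hold times, on our behalf.
    def detach_interface(instance_uuid, port_id):
        @lockutils.synchronized('interface-%s-%s' % (instance_uuid, port_id))
        def _do_detach_interface():
            # Talk to Neutron / the virt driver here.
            pass
        return _do_detach_interface()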
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1217.064031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.064449] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.065022] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6408b76b-a8ec-4d84-9897-2e2a6b260a74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.085368] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945535d5-c888-43be-9479-d34983dd8e2d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.116620] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfiguring VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1217.117828] env[70020]: DEBUG nova.scheduler.client.report [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 180 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1217.118229] env[70020]: DEBUG nova.compute.provider_tree [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 180 to 181 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1217.118540] env[70020]: DEBUG nova.compute.provider_tree [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1217.123326] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5b1a3f0-6ca1-4964-87bb-b617d09f85a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.144993] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1217.144993] env[70020]: value = "task-3619193" [ 1217.144993] env[70020]: _type = "Task" [ 1217.144993] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.153255] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.207256] env[70020]: INFO nova.compute.manager [-] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Took 1.24 seconds to deallocate network for instance. [ 1217.639581] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.641986] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 3.207s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.653502] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. 
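The repeated "Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is N%" entries come from oslo.vmware polling the vCenter task until it reaches a terminal state (the wait_for_task / _poll_task frames in the log). A simplified, generic sketch of that polling pattern; this is not the oslo.vmware implementation, and get_task_info stands in for the vSphere API call:

    import time

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # get_task_info is a stand-in for whatever fetches the vSphere
        # TaskInfo object; its states mirror queued/running/success/error.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # Still queued or running: report progress and poll again,
            # which is what produces the "progress is N%" lines above.
            print('Task %s progress is %s%%' % (task_ref, info.progress or 0))
            time.sleep(interval)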
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.658990] env[70020]: INFO nova.scheduler.client.report [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Deleted allocations for instance c29d577e-9498-40b1-8e49-caff821cb80a [ 1217.713525] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.149066] env[70020]: DEBUG nova.objects.instance [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'migration_context' on Instance uuid 9962b718-ca31-4f09-91f3-133dd68612ad {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.156243] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.167052] env[70020]: DEBUG oslo_concurrency.lockutils [None req-20731126-2c04-471d-977c-19f712b3d334 tempest-InstanceActionsTestJSON-986456735 tempest-InstanceActionsTestJSON-986456735-project-member] Lock "c29d577e-9498-40b1-8e49-caff821cb80a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.242s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.657821] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.798094] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0c8e71-7155-4239-b7b1-fa3d572b3dfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.806508] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e153e0-9da8-4dd2-82aa-e1cc0a042ec4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.838645] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee127df-30cf-4e0b-a244-d9ef06a7a1b3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.846874] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4070acbf-0850-41c8-a306-d88e97efc8f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.862364] env[70020]: DEBUG nova.compute.provider_tree [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1219.155901] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.396694] env[70020]: DEBUG nova.scheduler.client.report [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 181 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1219.397018] env[70020]: DEBUG nova.compute.provider_tree [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 181 to 182 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1219.397362] env[70020]: DEBUG nova.compute.provider_tree [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1219.662580] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.157651] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.410147] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.768s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.415835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.983s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.417301] env[70020]: INFO nova.compute.claims [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1220.658112] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.127697] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.127937] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.128171] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.128367] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1221.128552] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.131153] env[70020]: INFO nova.compute.manager [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Terminating instance [ 1221.159839] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.268383] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "602328f7-258a-44f5-802c-d580824beea0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.268609] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "602328f7-258a-44f5-802c-d580824beea0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.576317] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be0661b-281b-46e6-9e10-b18e2c424b62 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.584533] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be3694e-b89a-4c91-bde6-dbb012683d13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.614310] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e22c8c4-2474-4246-92fe-f00fb61b3f44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.622038] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0063a04-e34b-4d6f-b110-7d9994cb3e49 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.633864] env[70020]: DEBUG nova.compute.provider_tree [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1221.635529] env[70020]: DEBUG nova.compute.manager [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.635733] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.636650] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0cc069-062c-439e-a833-4ec914929294 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.642959] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.643681] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c456047-b896-4801-93d7-864d71e49130 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.654177] env[70020]: DEBUG oslo_vmware.api [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1221.654177] env[70020]: value = "task-3619196" [ 1221.654177] env[70020]: _type = "Task" [ 1221.654177] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.660281] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.666656] env[70020]: DEBUG oslo_vmware.api [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619196, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.771035] env[70020]: DEBUG nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1221.816405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.816405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.957426] env[70020]: INFO nova.compute.manager [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Swapping old allocation on dict_keys(['ee72c483-d9d9-4e62-8f73-e9f24668500d']) held by migration 9227eea3-db08-4f3a-9838-4e33a88b040b for instance [ 1221.979497] env[70020]: DEBUG nova.scheduler.client.report [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Overwriting current allocation {'allocations': {'ee72c483-d9d9-4e62-8f73-e9f24668500d': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 183}}, 'project_id': '4e3eae740ef84ef88aef113ed4d6e57b', 'user_id': '7b543e081f574f1f85874775a734a0a2', 'consumer_generation': 1} on consumer 9962b718-ca31-4f09-91f3-133dd68612ad {{(pid=70020) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1222.059556] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.059770] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.059950] env[70020]: DEBUG nova.network.neutron [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1222.161834] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 14%. 
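The "Swapping old allocation ... held by migration ..." and "Overwriting current allocation ... on consumer" entries above describe moving the instance's Placement allocations from the migration's consumer record back to the instance UUID, guarded by consumer_generation. A rough sketch of the overwrite step against Placement's HTTP API; the session object, base URL and microversion are assumptions, and this is not Nova's report client, which handles both consumers together:

    # Rough sketch, illustrative only. 'sess' is assumed to be an
    # authenticated keystoneauth/requests-style session pointed at the
    # placement endpoint with a microversion that supports consumer
    # generations. The payload shape matches the allocation document
    # printed in the log entry above.
    def overwrite_allocation(sess, consumer_uuid, rp_uuid, resources,
                             project_id, user_id, consumer_generation):
        payload = {
            'allocations': {rp_uuid: {'resources': resources}},
            'project_id': project_id,
            'user_id': user_id,
            # A stale consumer_generation makes Placement reject the write
            # with a 409, the same conflict class seen elsewhere in this log.
            'consumer_generation': consumer_generation,
        }
        return sess.put('/allocations/%s' % consumer_uuid, json=payload)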
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.162837] env[70020]: ERROR nova.scheduler.client.report [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [req-309a44db-5a4b-4db8-910d-28a884267c75] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ee72c483-d9d9-4e62-8f73-e9f24668500d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-309a44db-5a4b-4db8-910d-28a884267c75"}]} [ 1222.169250] env[70020]: DEBUG oslo_vmware.api [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619196, 'name': PowerOffVM_Task, 'duration_secs': 0.252746} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.169525] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.169700] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.169991] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfce828a-c41c-47ab-a0f7-aa5a93c212a5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.182328] env[70020]: DEBUG nova.scheduler.client.report [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1222.197360] env[70020]: DEBUG nova.scheduler.client.report [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1222.197627] env[70020]: DEBUG nova.compute.provider_tree [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1222.210614] env[70020]: DEBUG nova.scheduler.client.report [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1222.231240] env[70020]: DEBUG nova.scheduler.client.report [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1222.244194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.244491] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.244601] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleting the datastore file [datastore2] 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.244885] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17479c34-6f26-4959-b527-73177a9caa88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.253572] env[70020]: DEBUG oslo_vmware.api [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1222.253572] env[70020]: value = "task-3619199" [ 1222.253572] env[70020]: 
_type = "Task" [ 1222.253572] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.263151] env[70020]: DEBUG oslo_vmware.api [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.292017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.319197] env[70020]: DEBUG nova.compute.utils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1222.385351] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109d109a-9a5c-4733-bd1f-daba2d041ead {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.394031] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae841d6-e055-4771-9fcc-50cf80a6ea5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.427940] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d01486-91b5-4765-bbb3-c17d1de90c8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.436750] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adc6996-98a4-457b-88e7-ca2433fe6f27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.453652] env[70020]: DEBUG nova.compute.provider_tree [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1222.662260] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.764123] env[70020]: DEBUG oslo_vmware.api [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196951} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.764408] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.764596] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.764800] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.764984] env[70020]: INFO nova.compute.manager [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1222.765253] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1222.765464] env[70020]: DEBUG nova.compute.manager [-] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1222.765575] env[70020]: DEBUG nova.network.neutron [-] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1222.783288] env[70020]: DEBUG nova.network.neutron [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [{"id": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "address": "fa:16:3e:24:81:fe", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c9093b6-4b", "ovs_interfaceid": "6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.822143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.988945] env[70020]: DEBUG nova.scheduler.client.report [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 184 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1222.989265] env[70020]: DEBUG nova.compute.provider_tree [None 
req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 184 to 185 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1222.989450] env[70020]: DEBUG nova.compute.provider_tree [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 75, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1223.060055] env[70020]: DEBUG nova.compute.manager [req-fbf0f307-fc53-457a-8036-90d3722fa4b6 req-34a1ab04-83e7-44c7-b868-a2bf5968a2ee service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Received event network-vif-deleted-36d80bdd-ca39-476a-91b5-601ea7cb1316 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1223.060247] env[70020]: INFO nova.compute.manager [req-fbf0f307-fc53-457a-8036-90d3722fa4b6 req-34a1ab04-83e7-44c7-b868-a2bf5968a2ee service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Neutron deleted interface 36d80bdd-ca39-476a-91b5-601ea7cb1316; detaching it from the instance and deleting it from the info cache [ 1223.060424] env[70020]: DEBUG nova.network.neutron [req-fbf0f307-fc53-457a-8036-90d3722fa4b6 req-34a1ab04-83e7-44c7-b868-a2bf5968a2ee service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.161289] env[70020]: DEBUG oslo_vmware.api [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619193, 'name': ReconfigVM_Task, 'duration_secs': 5.820612} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.161616] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.161830] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Reconfigured VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1223.286164] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-9962b718-ca31-4f09-91f3-133dd68612ad" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.286843] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1223.287159] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8788e309-8012-4b57-8b2b-47e9b6def22e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.296755] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1223.296755] env[70020]: value = "task-3619200" [ 1223.296755] env[70020]: _type = "Task" [ 1223.296755] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.306850] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.494728] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.078s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.494728] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1223.498013] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.785s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.498237] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.500376] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.209s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.501880] env[70020]: INFO nova.compute.claims [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1223.527334] env[70020]: INFO nova.scheduler.client.report [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocations for instance ca63297c-b7bc-45e9-8850-f46050905c26 [ 1223.536854] env[70020]: DEBUG nova.network.neutron [-] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.563254] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a579d3e-6b85-4414-9131-7b99b23bc69e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.573606] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dbfe2a-ae80-4498-830e-6c85159dc5dd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.604760] env[70020]: DEBUG nova.compute.manager [req-fbf0f307-fc53-457a-8036-90d3722fa4b6 req-34a1ab04-83e7-44c7-b868-a2bf5968a2ee service nova] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Detach interface failed, port_id=36d80bdd-ca39-476a-91b5-601ea7cb1316, reason: Instance 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1223.806708] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619200, 'name': PowerOffVM_Task, 'duration_secs': 0.2297} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.806990] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1223.807700] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1223.807927] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1223.808084] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1223.808292] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1223.808656] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1223.808656] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1223.808798] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1223.808976] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1223.809173] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1223.809338] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1223.809531] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1223.815761] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d891a3a-6cb0-43c0-a7d6-f46ca613a53b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.832452] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1223.832452] env[70020]: value = "task-3619202" [ 1223.832452] env[70020]: _type = "Task" [ 1223.832452] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.842844] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619202, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.894986] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.895342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.895651] env[70020]: INFO nova.compute.manager [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Attaching volume 401287c7-6649-4da2-80e2-87b30ea658bd to /dev/sdb [ 1223.933836] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5eddd1-a00d-4172-8cd9-b2ce9afce13b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.945297] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29182ff2-3ecb-4444-9629-f6a79d000bf9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.966724] env[70020]: DEBUG nova.virt.block_device [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updating existing volume attachment record: 35126a09-1700-4f75-a8a4-7c1c0d045262 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1224.007067] env[70020]: DEBUG nova.compute.utils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1224.007747] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1224.007923] env[70020]: DEBUG nova.network.neutron [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1224.035174] env[70020]: DEBUG oslo_concurrency.lockutils [None req-644ea31d-5ea1-40d4-a9b9-4728140f2d7f tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "ca63297c-b7bc-45e9-8850-f46050905c26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.270s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.039756] env[70020]: INFO nova.compute.manager [-] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Took 1.27 seconds to deallocate network for instance. [ 1224.094998] env[70020]: DEBUG nova.policy [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1374458c1943470eba7e774715ba1ca9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3f6d704dd464768953c41d34d34d944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1224.342210] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619202, 'name': ReconfigVM_Task, 'duration_secs': 0.173608} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.343013] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909fd756-05b1-473e-899c-74d1104a2c4e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.362453] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1224.362693] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1224.362847] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1224.363040] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1224.363194] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1224.363342] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1224.363542] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1224.363700] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1224.363865] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1224.364035] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1224.364215] env[70020]: DEBUG nova.virt.hardware [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1224.365671] env[70020]: DEBUG nova.network.neutron [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Successfully created port: 954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1224.367580] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dc2aaca-979c-46c1-bf40-b4358f04166f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.374632] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1224.374632] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d71d9e-931e-26af-0dd0-2bef6984d867" [ 1224.374632] env[70020]: _type = "Task" [ 1224.374632] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.377948] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.378130] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.378305] env[70020]: DEBUG nova.network.neutron [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1224.385394] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d71d9e-931e-26af-0dd0-2bef6984d867, 'name': SearchDatastore_Task, 'duration_secs': 0.0091} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.390722] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1224.391542] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b28a4ce6-d8f9-4220-adb6-1a35194c85a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.411819] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1224.411819] env[70020]: value = "task-3619204" [ 1224.411819] env[70020]: _type = "Task" [ 1224.411819] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.421995] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619204, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.432369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.432369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.432369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.432369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.432369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.433533] env[70020]: INFO nova.compute.manager [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Terminating instance [ 1224.517579] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1224.554554] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.665514] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a030538-b8ce-486c-a4c4-31d1c8aaeb6e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.673865] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d48487-cedc-42a1-9e9d-191908198116 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.709603] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a79692a-a2de-4254-bb33-2ab241320ad2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.717736] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642cd4a4-37db-47ca-9e54-ed8fa5a87a8e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.731212] env[70020]: DEBUG nova.compute.provider_tree [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1224.923695] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619204, 'name': ReconfigVM_Task, 'duration_secs': 0.239771} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.924511] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1224.924860] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5847f72-d20a-463a-9941-02028ac375c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.942293] env[70020]: DEBUG nova.compute.manager [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1224.942611] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1224.950606] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1224.951437] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e7f87d-c731-4b81-9ed8-5cb26f4d6f99 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.954199] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b626530-69c6-4088-a4e8-96a51d6abc19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.976147] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1224.977517] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cac2d92f-206b-46ac-8051-eeb9817c3f7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.979752] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1224.979752] env[70020]: value = "task-3619205" [ 1224.979752] env[70020]: _type = "Task" [ 1224.979752] 
env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.985881] env[70020]: DEBUG oslo_vmware.api [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1224.985881] env[70020]: value = "task-3619206" [ 1224.985881] env[70020]: _type = "Task" [ 1224.985881] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.989855] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619205, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.999249] env[70020]: DEBUG oslo_vmware.api [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619206, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.093322] env[70020]: DEBUG nova.compute.manager [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-vif-deleted-db65a4ad-ec52-4dd1-bb59-3c000719f018 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1225.093322] env[70020]: INFO nova.compute.manager [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Neutron deleted interface db65a4ad-ec52-4dd1-bb59-3c000719f018; detaching it from the instance and deleting it from the info cache [ 1225.093322] env[70020]: DEBUG nova.network.neutron [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"36da126c-3f5a-43c8-98eb-774da4ecb681", "address": "fa:16:3e:7c:92:c8", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36da126c-3f", "ovs_interfaceid": "36da126c-3f5a-43c8-98eb-774da4ecb681", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.153982] env[70020]: INFO nova.network.neutron [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Port db65a4ad-ec52-4dd1-bb59-3c000719f018 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1225.154233] env[70020]: INFO nova.network.neutron [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Port 36da126c-3f5a-43c8-98eb-774da4ecb681 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1225.154730] env[70020]: DEBUG nova.network.neutron [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.262952] env[70020]: DEBUG nova.scheduler.client.report [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Updated inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1225.263507] env[70020]: DEBUG nova.compute.provider_tree [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Updating resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d generation from 185 to 186 during operation: update_inventory {{(pid=70020) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1225.263507] env[70020]: DEBUG nova.compute.provider_tree [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.490994] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619205, 'name': ReconfigVM_Task, 'duration_secs': 0.311479} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.494250] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad/9962b718-ca31-4f09-91f3-133dd68612ad.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1225.495087] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c892fea3-9c6f-4555-81d9-528b5ac481e1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.502971] env[70020]: DEBUG oslo_vmware.api [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619206, 'name': PowerOffVM_Task, 'duration_secs': 0.235747} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.516505] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1225.516697] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1225.517055] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-733c0e71-f890-4421-b83e-0a2e1bc0f457 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.518981] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eaef5d8-d649-481f-87c8-2c9555157dc5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.537960] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1225.541135] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac5f41c-fb19-44cc-b076-2c9266626fe1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.566781] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b94826-8853-4821-a9e0-5ff0b75029ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.574287] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1225.576511] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1225.576736] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1225.576891] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1225.577089] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1225.577237] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1225.577384] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1225.577589] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1225.577745] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1225.577908] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1225.578083] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1225.578258] env[70020]: DEBUG nova.virt.hardware [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1225.578510] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b604df0-e7ae-4302-9b79-16b14236b6ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.580541] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd69653-c47d-4965-837d-bf2f57fb8dcf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.588219] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fed59f-b739-482f-bff4-0eeeff3e00e6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.592805] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1225.592805] env[70020]: value = "task-3619208" [ 1225.592805] env[70020]: _type = "Task" [ 1225.592805] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.596485] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] Acquiring lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.608316] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619208, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.657740] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.768816] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.769320] env[70020]: DEBUG nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1225.772229] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.218s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.772451] env[70020]: DEBUG nova.objects.instance [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'resources' on Instance uuid 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1225.830672] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1225.830909] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1225.831109] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleting the datastore file [datastore2] cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1225.831443] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b0da75c-fa57-42b1-830e-5ddfcfe691b1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.839910] env[70020]: DEBUG oslo_vmware.api [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1225.839910] env[70020]: value = "task-3619210" [ 1225.839910] env[70020]: _type = "Task" [ 1225.839910] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.849407] env[70020]: DEBUG oslo_vmware.api [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619210, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.030661] env[70020]: DEBUG nova.network.neutron [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Successfully updated port: 954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1226.103853] env[70020]: DEBUG oslo_vmware.api [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619208, 'name': PowerOnVM_Task, 'duration_secs': 0.463564} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.103853] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1226.162980] env[70020]: DEBUG oslo_concurrency.lockutils [None req-de0e2ffa-b02a-49bb-8f3d-4063b8008850 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-cc46e905-958e-4dc3-8f83-f8b5680f94de-db65a4ad-ec52-4dd1-bb59-3c000719f018" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.600s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.278016] env[70020]: DEBUG nova.compute.utils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1226.279510] env[70020]: DEBUG nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Not allocating networking since 'none' was specified. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1226.352646] env[70020]: DEBUG oslo_vmware.api [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203099} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.352947] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.353154] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1226.353330] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1226.353499] env[70020]: INFO nova.compute.manager [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Took 1.41 seconds to destroy the instance on the hypervisor. [ 1226.353735] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1226.353925] env[70020]: DEBUG nova.compute.manager [-] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1226.354028] env[70020]: DEBUG nova.network.neutron [-] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1226.399111] env[70020]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 36da126c-3f5a-43c8-98eb-774da4ecb681 could not be found.", "detail": ""}} {{(pid=70020) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1226.399358] env[70020]: DEBUG nova.network.neutron [-] Unable to show port 36da126c-3f5a-43c8-98eb-774da4ecb681 as it no longer exists. 
{{(pid=70020) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1226.414806] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc254906-662e-4080-87b2-326da5ffbe23 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.423391] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0effe0ec-cead-40ce-94b1-ca97e4d97336 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.455711] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07536f35-b4a0-434d-8b11-3af9f38a158f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.463465] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0708405-537d-4a91-9b99-b3b48b4ed489 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.477514] env[70020]: DEBUG nova.compute.provider_tree [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.531869] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "refresh_cache-115a8a58-d3ce-4778-9bc7-c75d0007b499" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.532191] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "refresh_cache-115a8a58-d3ce-4778-9bc7-c75d0007b499" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.532191] env[70020]: DEBUG nova.network.neutron [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1226.782801] env[70020]: DEBUG nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1226.981351] env[70020]: DEBUG nova.scheduler.client.report [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1227.063466] env[70020]: DEBUG nova.network.neutron [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1227.116119] env[70020]: INFO nova.compute.manager [None req-2b70f5a7-0b8d-49a3-9dd4-d337597dffab tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance to original state: 'active' [ 1227.123866] env[70020]: DEBUG nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Received event network-vif-plugged-954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.123866] env[70020]: DEBUG oslo_concurrency.lockutils [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] Acquiring lock "115a8a58-d3ce-4778-9bc7-c75d0007b499-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.124082] env[70020]: DEBUG oslo_concurrency.lockutils [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.124153] env[70020]: DEBUG oslo_concurrency.lockutils [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.124309] env[70020]: DEBUG nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] No waiting events found dispatching network-vif-plugged-954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1227.124515] env[70020]: WARNING nova.compute.manager 
[req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Received unexpected event network-vif-plugged-954d7579-1660-4476-afe2-3759d551ef0c for instance with vm_state building and task_state spawning. [ 1227.124650] env[70020]: DEBUG nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Received event network-changed-954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.124805] env[70020]: DEBUG nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Refreshing instance network info cache due to event network-changed-954d7579-1660-4476-afe2-3759d551ef0c. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1227.125011] env[70020]: DEBUG oslo_concurrency.lockutils [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] Acquiring lock "refresh_cache-115a8a58-d3ce-4778-9bc7-c75d0007b499" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.207481] env[70020]: DEBUG nova.network.neutron [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Updating instance_info_cache with network_info: [{"id": "954d7579-1660-4476-afe2-3759d551ef0c", "address": "fa:16:3e:21:4c:9a", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954d7579-16", "ovs_interfaceid": "954d7579-1660-4476-afe2-3759d551ef0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.268905] env[70020]: DEBUG nova.network.neutron [-] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.487920] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.509571] env[70020]: INFO nova.scheduler.client.report [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleted allocations for instance 5b69d3b2-c236-45f9-b35b-a9992b9c1c79 [ 1227.710342] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "refresh_cache-115a8a58-d3ce-4778-9bc7-c75d0007b499" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.710731] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Instance network_info: |[{"id": "954d7579-1660-4476-afe2-3759d551ef0c", "address": "fa:16:3e:21:4c:9a", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954d7579-16", "ovs_interfaceid": "954d7579-1660-4476-afe2-3759d551ef0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1227.710946] env[70020]: DEBUG oslo_concurrency.lockutils [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] Acquired lock "refresh_cache-115a8a58-d3ce-4778-9bc7-c75d0007b499" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.711148] env[70020]: DEBUG nova.network.neutron [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Refreshing network info cache for port 954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1227.712316] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:4c:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6a6f7bb-6106-4cfd-9aef-b85628d0cefa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '954d7579-1660-4476-afe2-3759d551ef0c', 'vif_model': 
'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.720734] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.721758] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1227.721999] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f8fd0ad-671e-42ec-b5fa-f35eb8609b88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.747777] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.747777] env[70020]: value = "task-3619212" [ 1227.747777] env[70020]: _type = "Task" [ 1227.747777] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.756288] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619212, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.771896] env[70020]: INFO nova.compute.manager [-] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Took 1.42 seconds to deallocate network for instance. [ 1227.792353] env[70020]: DEBUG nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1227.820321] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1227.820623] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1227.820800] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1227.821027] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1227.821209] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1227.821411] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1227.821675] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1227.821852] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1227.822070] env[70020]: DEBUG 
nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1227.822281] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1227.822474] env[70020]: DEBUG nova.virt.hardware [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1227.823427] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca05958-b6a1-4101-894b-c022b476ac3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.832833] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb077e5-dd24-4b9c-a9a8-7492104d245f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.847247] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.852917] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Creating folder: Project (cf54e4d1d741412b907f78a98196ba4d). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1227.853226] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2702d4fa-1a73-4243-9c6b-03b90298dd88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.864522] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Created folder: Project (cf54e4d1d741412b907f78a98196ba4d) in parent group-v721521. [ 1227.864750] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Creating folder: Instances. Parent ref: group-v721833. 
{{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1227.864962] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47cd5d23-46a8-4a0d-bd3d-7491fe6dc078 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.874822] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Created folder: Instances in parent group-v721833. [ 1227.875076] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.875276] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1227.875483] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d0b1d77-eed5-42e1-b388-483f13254e8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.892766] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.892766] env[70020]: value = "task-3619215" [ 1227.892766] env[70020]: _type = "Task" [ 1227.892766] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.900182] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619215, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.017516] env[70020]: DEBUG oslo_concurrency.lockutils [None req-18495c3d-ff2a-4801-9c68-fbc8e86c6336 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "5b69d3b2-c236-45f9-b35b-a9992b9c1c79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.889s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.259467] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619212, 'name': CreateVM_Task, 'duration_secs': 0.346227} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.259643] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1228.260338] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.260512] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.260818] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1228.261091] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a85821fe-38ac-4ed5-89e8-ea88dea85ed1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.266449] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1228.266449] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5289a3db-1b4a-f0d8-d421-011c12ccf6ca" [ 1228.266449] env[70020]: _type = "Task" [ 1228.266449] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.275967] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5289a3db-1b4a-f0d8-d421-011c12ccf6ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.279040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.279426] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.279543] env[70020]: DEBUG nova.objects.instance [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'resources' on Instance uuid cc46e905-958e-4dc3-8f83-f8b5680f94de {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.404200] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619215, 'name': CreateVM_Task, 'duration_secs': 0.350756} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.404416] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1228.405288] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.510604] env[70020]: DEBUG nova.network.neutron [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Updated VIF entry in instance network info cache for port 954d7579-1660-4476-afe2-3759d551ef0c. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1228.510982] env[70020]: DEBUG nova.network.neutron [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Updating instance_info_cache with network_info: [{"id": "954d7579-1660-4476-afe2-3759d551ef0c", "address": "fa:16:3e:21:4c:9a", "network": {"id": "c7c6ab1d-12b0-4699-ab0e-47b8d23ee3bc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2032377329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3f6d704dd464768953c41d34d34d944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6a6f7bb-6106-4cfd-9aef-b85628d0cefa", "external-id": "nsx-vlan-transportzone-194", "segmentation_id": 194, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap954d7579-16", "ovs_interfaceid": "954d7579-1660-4476-afe2-3759d551ef0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.525094] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Volume attach. 
Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1228.525339] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721831', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'name': 'volume-401287c7-6649-4da2-80e2-87b30ea658bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b25f8db-457e-4948-b9da-35e2fa5b897e', 'attached_at': '', 'detached_at': '', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'serial': '401287c7-6649-4da2-80e2-87b30ea658bd'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1228.526449] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83266026-65a4-4629-8409-0c62600ddfc7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.544444] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e30ff03-580a-49a2-afce-66c96855b529 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.570568] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-401287c7-6649-4da2-80e2-87b30ea658bd/volume-401287c7-6649-4da2-80e2-87b30ea658bd.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1228.570872] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-275d5afa-500c-495a-af00-e2fd08425829 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.590625] env[70020]: DEBUG oslo_vmware.api [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1228.590625] env[70020]: value = "task-3619217" [ 1228.590625] env[70020]: _type = "Task" [ 1228.590625] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.600438] env[70020]: DEBUG oslo_vmware.api [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619217, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.778086] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5289a3db-1b4a-f0d8-d421-011c12ccf6ca, 'name': SearchDatastore_Task, 'duration_secs': 0.011481} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.778461] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.778607] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1228.778809] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.778957] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.779153] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1228.779441] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.779748] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1228.779983] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f75ad9b6-618f-44f5-961f-86f528748536 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.781924] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69b789a0-a307-499e-a8dd-66d8c711e7d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.790741] env[70020]: DEBUG 
oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1228.790741] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]521189e6-2f4d-1c67-3343-52f900e93659" [ 1228.790741] env[70020]: _type = "Task" [ 1228.790741] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.795049] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1228.795237] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1228.796207] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897b28dc-8c2b-4887-bd4d-19aad661bd87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.801761] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521189e6-2f4d-1c67-3343-52f900e93659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.804823] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1228.804823] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52bd2459-0ade-f6dc-2002-ded51f535fb6" [ 1228.804823] env[70020]: _type = "Task" [ 1228.804823] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.815336] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd2459-0ade-f6dc-2002-ded51f535fb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.905780] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c399ee-6cf6-42f4-9421-92dc25730c44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.913437] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d518db4-7329-4515-a633-e4c2186be507 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.916743] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.916970] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.917184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.917367] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.917538] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.919771] env[70020]: INFO nova.compute.manager [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Terminating instance [ 1228.949226] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de11a56-c2e5-46cb-8772-0e31ade647a4 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.957608] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e53b12-5f0d-4ec5-9dd3-5444b1a9a7e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.972094] env[70020]: DEBUG nova.compute.provider_tree [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.980440] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "9962b718-ca31-4f09-91f3-133dd68612ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.980658] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.980849] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.981056] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.981220] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.982959] env[70020]: INFO nova.compute.manager [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Terminating instance [ 1229.013945] env[70020]: DEBUG oslo_concurrency.lockutils [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] Releasing lock "refresh_cache-115a8a58-d3ce-4778-9bc7-c75d0007b499" {{(pid=70020) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.014320] env[70020]: DEBUG nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-vif-deleted-0c413503-8ce1-454a-a6b0-3fb75d647a04 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1229.014535] env[70020]: INFO nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Neutron deleted interface 0c413503-8ce1-454a-a6b0-3fb75d647a04; detaching it from the instance and deleting it from the info cache [ 1229.014710] env[70020]: DEBUG nova.network.neutron [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.100792] env[70020]: DEBUG oslo_vmware.api [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619217, 'name': ReconfigVM_Task, 'duration_secs': 0.374905} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.101100] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-401287c7-6649-4da2-80e2-87b30ea658bd/volume-401287c7-6649-4da2-80e2-87b30ea658bd.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1229.105935] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21a95638-2976-4b25-8109-29bad2f5f448 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.124774] env[70020]: DEBUG oslo_vmware.api [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1229.124774] env[70020]: value = "task-3619218" [ 1229.124774] env[70020]: _type = "Task" [ 1229.124774] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.134036] env[70020]: DEBUG oslo_vmware.api [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.301741] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]521189e6-2f4d-1c67-3343-52f900e93659, 'name': SearchDatastore_Task, 'duration_secs': 0.017964} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.301988] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.302368] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1229.302554] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.316545] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52bd2459-0ade-f6dc-2002-ded51f535fb6, 'name': SearchDatastore_Task, 'duration_secs': 0.01799} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.317322] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ba7187d-1f1f-43dc-b2d4-4aedb13a0b15 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.322432] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1229.322432] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c050e5-3c09-98e8-7376-097f8be2bf80" [ 1229.322432] env[70020]: _type = "Task" [ 1229.322432] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.330384] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c050e5-3c09-98e8-7376-097f8be2bf80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.424052] env[70020]: DEBUG nova.compute.manager [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1229.424052] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1229.424640] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccd4210-e553-4da4-a759-9717ef60e0c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.432254] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1229.432478] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-971eeac9-0ac6-430d-9751-f6cd725e0ff4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.438896] env[70020]: DEBUG oslo_vmware.api [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1229.438896] env[70020]: value = "task-3619219" [ 1229.438896] env[70020]: _type = "Task" [ 1229.438896] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.446066] env[70020]: DEBUG oslo_vmware.api [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.475346] env[70020]: DEBUG nova.scheduler.client.report [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1229.486197] env[70020]: DEBUG nova.compute.manager [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1229.486479] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1229.487490] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce184c3-1953-489d-9a85-0475fffa2ace {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.498045] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1229.498165] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-455d30c9-8c26-4ee6-a03b-515c6090c0a7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.504880] env[70020]: DEBUG oslo_vmware.api [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1229.504880] env[70020]: value = "task-3619220" [ 1229.504880] env[70020]: _type = "Task" [ 1229.504880] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.513483] env[70020]: DEBUG oslo_vmware.api [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619220, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.517235] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6850591-796e-48f6-adde-26ec7991ce87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.526811] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9666aa5-a868-49d6-b451-1b1f8129d5da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.559170] env[70020]: DEBUG nova.compute.manager [req-2258aa45-3e61-4ec3-964d-135fbb6baca3 req-8df4cb3f-79a2-4815-8b74-a6f1170e4487 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Detach interface failed, port_id=0c413503-8ce1-454a-a6b0-3fb75d647a04, reason: Instance cc46e905-958e-4dc3-8f83-f8b5680f94de could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1229.633729] env[70020]: DEBUG oslo_vmware.api [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619218, 'name': ReconfigVM_Task, 'duration_secs': 0.258818} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.634055] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721831', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'name': 'volume-401287c7-6649-4da2-80e2-87b30ea658bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b25f8db-457e-4948-b9da-35e2fa5b897e', 'attached_at': '', 'detached_at': '', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'serial': '401287c7-6649-4da2-80e2-87b30ea658bd'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1229.832995] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c050e5-3c09-98e8-7376-097f8be2bf80, 'name': SearchDatastore_Task, 'duration_secs': 0.015268} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.833406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.833521] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 115a8a58-d3ce-4778-9bc7-c75d0007b499/115a8a58-d3ce-4778-9bc7-c75d0007b499.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1229.833791] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.833972] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1229.834194] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32648d7c-37c8-42f3-93b5-66b7cfb152a2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.836100] env[70020]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-684fe49c-52b3-421f-a1b4-dab18fef13e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.844260] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1229.844260] env[70020]: value = "task-3619221" [ 1229.844260] env[70020]: _type = "Task" [ 1229.844260] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.845345] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1229.845515] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1229.848754] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84fd33cc-24b9-4896-8614-d6f9faa15567 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.856055] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.857185] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1229.857185] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523b2cf7-d061-b97a-3bed-a1ccca78042c" [ 1229.857185] env[70020]: _type = "Task" [ 1229.857185] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.866027] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523b2cf7-d061-b97a-3bed-a1ccca78042c, 'name': SearchDatastore_Task, 'duration_secs': 0.009953} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.866775] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29df0b2e-e63e-4b28-9a06-a7c5f6c04d41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.871560] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1229.871560] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ee4837-83bc-72fc-6827-41a4fb60ed82" [ 1229.871560] env[70020]: _type = "Task" [ 1229.871560] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.879261] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ee4837-83bc-72fc-6827-41a4fb60ed82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.950019] env[70020]: DEBUG oslo_vmware.api [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619219, 'name': PowerOffVM_Task, 'duration_secs': 0.260188} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.950278] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1229.950445] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1229.950695] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6b93132-b95f-47a2-b032-6fda68c42c85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.980383] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.000736] env[70020]: INFO nova.scheduler.client.report [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted allocations for instance cc46e905-958e-4dc3-8f83-f8b5680f94de [ 1230.018279] env[70020]: DEBUG oslo_vmware.api [None 
req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619220, 'name': PowerOffVM_Task, 'duration_secs': 0.221347} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.018564] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1230.018731] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1230.019039] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50d77eff-acd4-4d81-ba0e-c578bb8eba2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.024643] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1230.024902] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1230.025094] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleting the datastore file [datastore2] b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1230.025367] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4c7c61b-332d-484f-b829-19262cd859d8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.032663] env[70020]: DEBUG oslo_vmware.api [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for the task: (returnval){ [ 1230.032663] env[70020]: value = "task-3619224" [ 1230.032663] env[70020]: _type = "Task" [ 1230.032663] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.042589] env[70020]: DEBUG oslo_vmware.api [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619224, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.106912] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1230.107236] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1230.107413] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleting the datastore file [datastore1] 9962b718-ca31-4f09-91f3-133dd68612ad {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1230.107734] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc81e3b4-fde9-4204-bb8d-f67429ebec85 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.116420] env[70020]: DEBUG oslo_vmware.api [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1230.116420] env[70020]: value = "task-3619225" [ 1230.116420] env[70020]: _type = "Task" [ 1230.116420] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.125794] env[70020]: DEBUG oslo_vmware.api [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619225, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.356930] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51171} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.357201] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 115a8a58-d3ce-4778-9bc7-c75d0007b499/115a8a58-d3ce-4778-9bc7-c75d0007b499.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1230.358200] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1230.358200] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05a19408-c4e9-474a-96ba-5cef620be8f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.366337] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1230.366337] env[70020]: value = "task-3619226" [ 1230.366337] env[70020]: _type = "Task" [ 1230.366337] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.379022] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.385452] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ee4837-83bc-72fc-6827-41a4fb60ed82, 'name': SearchDatastore_Task, 'duration_secs': 0.02277} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.385732] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.385992] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1230.386306] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93ad5176-c4eb-4f92-a8b3-c5f743550ba1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.394751] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1230.394751] env[70020]: value = "task-3619227" [ 1230.394751] env[70020]: _type = "Task" [ 1230.394751] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.404336] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619227, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.509530] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24925911-8e27-4808-905a-c4e2d9d3084b tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.079s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.510594] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] Acquired lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.511688] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d84a652-a538-474c-95ba-bc7bb877583c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.521183] env[70020]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1230.521361] env[70020]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=70020) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1230.522008] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b8ce3f4-7291-45f0-892c-a0ea7d9fb628 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.537341] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12871df7-cb39-435d-9cfa-3a6d162bce3a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.555859] env[70020]: DEBUG oslo_vmware.api [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Task: {'id': task-3619224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404256} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.556176] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1230.556372] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1230.556575] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1230.556983] env[70020]: INFO nova.compute.manager [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1230.557176] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1230.557413] env[70020]: DEBUG nova.compute.manager [-] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1230.557554] env[70020]: DEBUG nova.network.neutron [-] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1230.576346] env[70020]: ERROR root [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-721818' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-721818' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-721818' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-721818'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in 
switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-721818' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-721818' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-721818'}\n"]: nova.exception.InstanceNotFound: Instance cc46e905-958e-4dc3-8f83-f8b5680f94de could not be found. [ 1230.576346] env[70020]: DEBUG oslo_concurrency.lockutils [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] Releasing lock "cc46e905-958e-4dc3-8f83-f8b5680f94de" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.577254] env[70020]: DEBUG nova.compute.manager [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Detach interface failed, port_id=db65a4ad-ec52-4dd1-bb59-3c000719f018, reason: Instance cc46e905-958e-4dc3-8f83-f8b5680f94de could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1230.577254] env[70020]: DEBUG nova.compute.manager [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Received event network-vif-deleted-36da126c-3f5a-43c8-98eb-774da4ecb681 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1230.577254] env[70020]: INFO nova.compute.manager [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Neutron deleted interface 36da126c-3f5a-43c8-98eb-774da4ecb681; detaching it from the instance and deleting it from the info cache [ 1230.577254] env[70020]: DEBUG nova.network.neutron [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Updating instance_info_cache with network_info: [{"id": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "address": "fa:16:3e:ca:8d:a7", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": 
"nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c413503-8c", "ovs_interfaceid": "0c413503-8ce1-454a-a6b0-3fb75d647a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.633023] env[70020]: DEBUG oslo_vmware.api [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619225, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28199} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.633023] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1230.633023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1230.633023] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1230.633023] env[70020]: INFO nova.compute.manager [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1230.633023] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1230.633407] env[70020]: DEBUG nova.compute.manager [-] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1230.633407] env[70020]: DEBUG nova.network.neutron [-] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1230.677078] env[70020]: DEBUG nova.objects.instance [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'flavor' on Instance uuid 1b25f8db-457e-4948-b9da-35e2fa5b897e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.881705] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069997} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.881705] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.881705] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd8773f-aab0-4966-91ed-83ee4775e77a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.904613] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 115a8a58-d3ce-4778-9bc7-c75d0007b499/115a8a58-d3ce-4778-9bc7-c75d0007b499.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.910934] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3689b09-1a62-4b56-8432-45b9a61fd296 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.938027] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450485} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.938027] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1230.938027] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1230.938027] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1230.938027] env[70020]: value = "task-3619228" [ 1230.938027] env[70020]: _type = "Task" [ 1230.938027] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.938027] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-840c26a1-7cd5-4038-8fa7-73ce84283aee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.951189] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619228, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.952852] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1230.952852] env[70020]: value = "task-3619229" [ 1230.952852] env[70020]: _type = "Task" [ 1230.952852] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.962365] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619229, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.999249] env[70020]: DEBUG nova.compute.manager [req-1fd99087-e931-4c75-a7b2-4e456678a045 req-e9a183f3-1fbf-4046-b5c0-6501f0cf701e service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Received event network-vif-deleted-9e9d26c4-eeea-4e28-84a1-156d81e4466a {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1230.999444] env[70020]: INFO nova.compute.manager [req-1fd99087-e931-4c75-a7b2-4e456678a045 req-e9a183f3-1fbf-4046-b5c0-6501f0cf701e service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Neutron deleted interface 9e9d26c4-eeea-4e28-84a1-156d81e4466a; detaching it from the instance and deleting it from the info cache [ 1230.999781] env[70020]: DEBUG nova.network.neutron [req-1fd99087-e931-4c75-a7b2-4e456678a045 req-e9a183f3-1fbf-4046-b5c0-6501f0cf701e service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.082825] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa7bbd5d-f1f3-4fe5-b730-53153b924e1f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.089597] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e8ea45-f65c-46ed-9454-f4c9e1dd9c28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.124104] env[70020]: DEBUG nova.compute.manager [req-5e124588-54a0-43c3-904f-adc6ad85b389 req-78bdb609-2e84-4b01-817c-e1f2a3dc9cc8 service nova] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Detach interface failed, port_id=36da126c-3f5a-43c8-98eb-774da4ecb681, reason: Instance cc46e905-958e-4dc3-8f83-f8b5680f94de could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1231.186672] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8b4c182e-4e2b-4504-84c6-58f900460082 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.290s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.249093] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.249093] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.428684] env[70020]: DEBUG nova.network.neutron [-] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.452360] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619228, 'name': ReconfigVM_Task, 'duration_secs': 0.28935} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.452658] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 115a8a58-d3ce-4778-9bc7-c75d0007b499/115a8a58-d3ce-4778-9bc7-c75d0007b499.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.453310] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6b2211e-4cb8-4486-8532-6db51f8ba820 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.464057] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074954} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.465190] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1231.465523] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1231.465523] env[70020]: value = "task-3619230" [ 1231.465523] env[70020]: _type = "Task" [ 1231.465523] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.466205] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec816fc-db4f-4c57-b45b-930347c26e73 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.476320] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619230, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.485562] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.486074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.496880] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1231.499538] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69ea3812-e200-4895-b1b0-3598f595e6b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.517893] env[70020]: DEBUG nova.network.neutron [-] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.520289] env[70020]: DEBUG 
nova.compute.manager [req-adc907d8-8d04-435d-aafa-969c96e9f82b req-a06026da-3921-4ef1-acd6-0e6148e8f93b service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Received event network-vif-deleted-6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1231.520482] env[70020]: INFO nova.compute.manager [req-adc907d8-8d04-435d-aafa-969c96e9f82b req-a06026da-3921-4ef1-acd6-0e6148e8f93b service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Neutron deleted interface 6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5; detaching it from the instance and deleting it from the info cache [ 1231.520647] env[70020]: DEBUG nova.network.neutron [req-adc907d8-8d04-435d-aafa-969c96e9f82b req-a06026da-3921-4ef1-acd6-0e6148e8f93b service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.522100] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c7876c0-8464-4960-801d-7c17e79f6c96 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.530889] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1231.530889] env[70020]: value = "task-3619231" [ 1231.530889] env[70020]: _type = "Task" [ 1231.530889] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.538344] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd1eca5-557f-4bbe-9a02-e795f4079fc6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.555480] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.575352] env[70020]: DEBUG nova.compute.manager [req-1fd99087-e931-4c75-a7b2-4e456678a045 req-e9a183f3-1fbf-4046-b5c0-6501f0cf701e service nova] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Detach interface failed, port_id=9e9d26c4-eeea-4e28-84a1-156d81e4466a, reason: Instance b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1231.754027] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1231.933589] env[70020]: INFO nova.compute.manager [-] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Took 1.38 seconds to deallocate network for instance. 
[ 1231.979634] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619230, 'name': Rename_Task, 'duration_secs': 0.148042} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.980184] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1231.980184] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-729fe9bf-ae6a-40b9-85aa-91a2d50f4b98 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.987697] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1231.987697] env[70020]: value = "task-3619232" [ 1231.987697] env[70020]: _type = "Task" [ 1231.987697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.996726] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619232, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.998368] env[70020]: INFO nova.compute.manager [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Detaching volume 401287c7-6649-4da2-80e2-87b30ea658bd [ 1232.022511] env[70020]: INFO nova.compute.manager [-] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Took 1.39 seconds to deallocate network for instance. [ 1232.029838] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e93ec8b0-aac0-497a-adfa-4bae9ca733e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.044688] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619231, 'name': ReconfigVM_Task, 'duration_secs': 0.287715} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.044688] env[70020]: INFO nova.virt.block_device [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Attempting to driver detach volume 401287c7-6649-4da2-80e2-87b30ea658bd from mountpoint /dev/sdb [ 1232.044688] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1232.044688] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721831', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'name': 'volume-401287c7-6649-4da2-80e2-87b30ea658bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b25f8db-457e-4948-b9da-35e2fa5b897e', 'attached_at': '', 'detached_at': '', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'serial': '401287c7-6649-4da2-80e2-87b30ea658bd'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1232.045945] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1232.046376] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c8cb1d-e6c9-4701-99d9-611ce5151205 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.049927] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2df4258e-4b1e-4ca2-8e16-8a49cc78383c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.054370] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397fed5d-3e38-418c-bfa6-42ca5987b199 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.096128] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea3f888-5d07-42db-bf78-2e56a17b7aeb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.099226] env[70020]: DEBUG nova.compute.manager [req-adc907d8-8d04-435d-aafa-969c96e9f82b req-a06026da-3921-4ef1-acd6-0e6148e8f93b service nova] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Detach interface failed, 
port_id=6c9093b6-4b0e-4ac8-b1b1-4e54e43363a5, reason: Instance 9962b718-ca31-4f09-91f3-133dd68612ad could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1232.099690] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1232.099690] env[70020]: value = "task-3619233" [ 1232.099690] env[70020]: _type = "Task" [ 1232.099690] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.108633] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109722f8-4605-4899-96ba-3bdaefe56dd9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.111087] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619233, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.131954] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8636aad-79aa-41ff-aac6-2ef3619058e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.147809] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] The volume has not been displaced from its original location: [datastore2] volume-401287c7-6649-4da2-80e2-87b30ea658bd/volume-401287c7-6649-4da2-80e2-87b30ea658bd.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1232.153494] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1232.153900] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3156ada-7c97-407f-a8a0-7371fe7dd86e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.176047] env[70020]: DEBUG oslo_vmware.api [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1232.176047] env[70020]: value = "task-3619234" [ 1232.176047] env[70020]: _type = "Task" [ 1232.176047] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.186130] env[70020]: DEBUG oslo_vmware.api [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619234, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.275628] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.275972] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.277694] env[70020]: INFO nova.compute.claims [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1232.440537] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.498131] env[70020]: DEBUG oslo_vmware.api [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619232, 'name': PowerOnVM_Task, 'duration_secs': 0.48689} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.498473] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.498679] env[70020]: INFO nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Took 6.96 seconds to spawn the instance on the hypervisor. 
[ 1232.498870] env[70020]: DEBUG nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1232.499840] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f179090-8e8c-49bd-be60-069d58d06b63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.533712] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.609867] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619233, 'name': Rename_Task, 'duration_secs': 0.143431} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.610210] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.610463] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e5045a1-7482-40db-8153-a6b545bca7ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.616503] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1232.616503] env[70020]: value = "task-3619235" [ 1232.616503] env[70020]: _type = "Task" [ 1232.616503] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.624059] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.686434] env[70020]: DEBUG oslo_vmware.api [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619234, 'name': ReconfigVM_Task, 'duration_secs': 0.216171} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.686434] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1232.690545] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8cc8f5a-2317-4a75-ad0f-93ee50e8b774 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.704978] env[70020]: DEBUG oslo_vmware.api [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1232.704978] env[70020]: value = "task-3619236" [ 1232.704978] env[70020]: _type = "Task" [ 1232.704978] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.712526] env[70020]: DEBUG oslo_vmware.api [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619236, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.018917] env[70020]: INFO nova.compute.manager [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Took 17.61 seconds to build instance. [ 1233.068923] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.069205] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.126524] env[70020]: DEBUG oslo_vmware.api [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619235, 'name': PowerOnVM_Task, 'duration_secs': 0.466691} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.126903] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.127150] env[70020]: INFO nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Took 5.33 seconds to spawn the instance on the hypervisor. [ 1233.127374] env[70020]: DEBUG nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.128238] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d9c33c-9d32-491f-b67b-5714a3c185c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.214722] env[70020]: DEBUG oslo_vmware.api [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619236, 'name': ReconfigVM_Task, 'duration_secs': 0.139787} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.215071] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721831', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'name': 'volume-401287c7-6649-4da2-80e2-87b30ea658bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b25f8db-457e-4948-b9da-35e2fa5b897e', 'attached_at': '', 'detached_at': '', 'volume_id': '401287c7-6649-4da2-80e2-87b30ea658bd', 'serial': '401287c7-6649-4da2-80e2-87b30ea658bd'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1233.412510] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ef4af1-2a2d-47ce-8dac-e93b3502ee22 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.420305] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b2f2eb-24c2-459a-9db7-92c05ab86ed3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.451754] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78dadf1-27cb-4bc5-ba23-58676a2e42db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.461579] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b811708-effb-4260-ab30-cd290eda47b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.480707] env[70020]: DEBUG nova.compute.provider_tree [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.521503] env[70020]: DEBUG oslo_concurrency.lockutils [None req-110c7dd6-57bf-4747-8fa6-71c4adfb20ed tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.117s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.571935] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1233.644671] env[70020]: INFO nova.compute.manager [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Took 11.37 seconds to build instance. [ 1233.759536] env[70020]: DEBUG nova.objects.instance [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'flavor' on Instance uuid 1b25f8db-457e-4948-b9da-35e2fa5b897e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.985096] env[70020]: DEBUG nova.scheduler.client.report [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1234.090519] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.113075] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "115a8a58-d3ce-4778-9bc7-c75d0007b499" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.113338] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.113544] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "115a8a58-d3ce-4778-9bc7-c75d0007b499-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.113723] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.113890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.115828] env[70020]: INFO nova.compute.manager [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Terminating instance [ 1234.146276] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b35ad1c6-0014-43f8-9d9f-6d8bb2977b37 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "602328f7-258a-44f5-802c-d580824beea0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.878s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.489766] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.489964] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1234.492782] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.052s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.493016] env[70020]: DEBUG nova.objects.instance [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lazy-loading 'resources' on Instance uuid b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.623192] env[70020]: INFO nova.compute.manager [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Rebuilding instance [ 1234.626031] env[70020]: DEBUG nova.compute.manager [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1234.626031] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1234.626777] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea5c9fd-1510-483a-b97b-6245ae74eabf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.634746] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.634969] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ced03d08-d37d-4241-87ec-31b5f4a08fce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.641978] env[70020]: DEBUG oslo_vmware.api [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1234.641978] env[70020]: value = "task-3619237" [ 1234.641978] env[70020]: _type = "Task" [ 1234.641978] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.652639] env[70020]: DEBUG oslo_vmware.api [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.658494] env[70020]: DEBUG nova.compute.manager [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1234.659252] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8753b423-cbd8-4cf2-8839-7f86cafce343 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.766292] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f0c2a409-88f4-4de2-afac-ec25bbf4e65e tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.280s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.927835] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.928108] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.928322] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "1b25f8db-457e-4948-b9da-35e2fa5b897e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.928504] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.928676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.931194] env[70020]: INFO nova.compute.manager 
[None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Terminating instance [ 1234.998749] env[70020]: DEBUG nova.compute.utils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1235.000555] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1235.000757] env[70020]: DEBUG nova.network.neutron [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1235.039080] env[70020]: DEBUG nova.policy [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '291265cdc1164603a9011173b1457c31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b060ffb3ac4ecd95dcd85d4744dc2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1235.114221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7448350-cf18-4534-b845-facd609601f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.121772] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1126e3c6-8960-4952-952f-1562eb9574c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.154915] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd12276-3efb-46ce-a343-8748e5c85a27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.161948] env[70020]: DEBUG oslo_vmware.api [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619237, 'name': PowerOffVM_Task, 'duration_secs': 0.174763} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.163839] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.164045] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.164274] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da16b454-8f03-4c70-aecf-7a87cd8fc135 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.166606] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d197bf3-9aa9-4875-8997-5fb2d3f18438 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.181907] env[70020]: DEBUG nova.compute.provider_tree [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.233378] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1235.233640] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1235.233876] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleting the datastore file [datastore2] 115a8a58-d3ce-4778-9bc7-c75d0007b499 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.234205] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f13aa380-bda3-41f7-a7b7-b05a21791e6d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.242175] env[70020]: DEBUG oslo_vmware.api [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for the task: (returnval){ [ 1235.242175] env[70020]: value = "task-3619239" [ 1235.242175] env[70020]: _type = "Task" [ 1235.242175] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.249688] env[70020]: DEBUG oslo_vmware.api [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619239, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.320850] env[70020]: DEBUG nova.network.neutron [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Successfully created port: c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.434468] env[70020]: DEBUG nova.compute.manager [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1235.434748] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1235.435640] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863bd4f1-7754-41db-82f9-44a6c08eca59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.443136] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.443360] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6665be7-7940-4dfd-b5cf-885aeeb0bf74 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.448970] env[70020]: DEBUG oslo_vmware.api [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1235.448970] env[70020]: value = "task-3619240" [ 1235.448970] env[70020]: _type = "Task" [ 1235.448970] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.456836] env[70020]: DEBUG oslo_vmware.api [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619240, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.504373] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1235.674766] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.675162] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e51db39f-9030-4628-9261-8c506236081a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.683148] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1235.683148] env[70020]: value = "task-3619241" [ 1235.683148] env[70020]: _type = "Task" [ 1235.683148] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.686795] env[70020]: DEBUG nova.scheduler.client.report [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1235.695301] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619241, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.750653] env[70020]: DEBUG oslo_vmware.api [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Task: {'id': task-3619239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155192} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.750890] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1235.751089] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1235.751271] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1235.751440] env[70020]: INFO nova.compute.manager [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1235.751672] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1235.751858] env[70020]: DEBUG nova.compute.manager [-] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1235.751951] env[70020]: DEBUG nova.network.neutron [-] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1235.960938] env[70020]: DEBUG oslo_vmware.api [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619240, 'name': PowerOffVM_Task, 'duration_secs': 0.254545} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.961281] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.961457] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.961713] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-032c7183-e96c-42f3-b616-260ed960c8d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.009556] env[70020]: INFO nova.virt.block_device [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Booting with volume 019bffbe-24bf-4b30-80fe-f387c8bba21b at /dev/sda [ 1236.037420] env[70020]: DEBUG nova.compute.manager [req-fa176590-90d9-44c8-9cde-405caef1f1ff req-1ca5ebf5-686d-406b-b64c-a6b293387364 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Received event network-vif-deleted-954d7579-1660-4476-afe2-3759d551ef0c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1236.037751] env[70020]: INFO nova.compute.manager [req-fa176590-90d9-44c8-9cde-405caef1f1ff req-1ca5ebf5-686d-406b-b64c-a6b293387364 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Neutron deleted interface 954d7579-1660-4476-afe2-3759d551ef0c; detaching it from the instance and deleting it from the info cache [ 1236.037988] env[70020]: DEBUG nova.network.neutron [req-fa176590-90d9-44c8-9cde-405caef1f1ff req-1ca5ebf5-686d-406b-b64c-a6b293387364 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.044204] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.044411] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.044619] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleting the datastore file [datastore1] 1b25f8db-457e-4948-b9da-35e2fa5b897e {{(pid=70020) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.045742] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75353aa4-3a67-4dc6-af67-56a3aa2e212b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.048139] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ada3d94-bd7b-4d5f-828f-d4807b53d354 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.056514] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88f4cf8-cdc6-41bd-8ab2-12ac52904c5f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.068697] env[70020]: DEBUG oslo_vmware.api [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for the task: (returnval){ [ 1236.068697] env[70020]: value = "task-3619243" [ 1236.068697] env[70020]: _type = "Task" [ 1236.068697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.077376] env[70020]: DEBUG oslo_vmware.api [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619243, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.086469] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ebf0a6a7-8c97-4379-be4c-5c6d8da89e24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.093844] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ddb29cd-fa6e-4925-96e8-c1e2dc6360db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.122859] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b18a82-3626-4b50-976e-b37c11ddcea7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.129090] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6193154-af86-40c6-b4e2-1464b7263e57 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.142330] env[70020]: DEBUG nova.virt.block_device [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating existing volume attachment record: 13cf2884-88e7-465c-b378-05b16f9bd2a1 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1236.192720] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.194615] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619241, 'name': PowerOffVM_Task, 'duration_secs': 0.169984} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.195752] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.661s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.195938] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.197981] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.108s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.199506] env[70020]: INFO nova.compute.claims [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.202047] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.202639] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.204609] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2dbc03f-45f0-4f74-956f-7b95e48f77ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.213721] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.214042] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-4bd3c67a-7519-4a1f-a76d-4a9545e7afdf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.221324] env[70020]: INFO nova.scheduler.client.report [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted allocations for instance 9962b718-ca31-4f09-91f3-133dd68612ad [ 1236.223906] env[70020]: INFO nova.scheduler.client.report [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Deleted allocations for instance b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d [ 1236.238497] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.238615] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.238794] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Deleting the datastore file [datastore2] 602328f7-258a-44f5-802c-d580824beea0 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.239322] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f38a2698-c809-4fb7-9bfe-7d8499337f46 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.246092] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1236.246092] env[70020]: value = "task-3619245" [ 1236.246092] env[70020]: _type = "Task" [ 1236.246092] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.254070] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619245, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.514437] env[70020]: DEBUG nova.network.neutron [-] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.540564] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f313ffe-d080-40ee-95ff-b1da0b3a0cb1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.550990] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9187747f-b204-4b03-a55d-959499a2e7f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.582805] env[70020]: DEBUG nova.compute.manager [req-fa176590-90d9-44c8-9cde-405caef1f1ff req-1ca5ebf5-686d-406b-b64c-a6b293387364 service nova] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Detach interface failed, port_id=954d7579-1660-4476-afe2-3759d551ef0c, reason: Instance 115a8a58-d3ce-4778-9bc7-c75d0007b499 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1236.587999] env[70020]: DEBUG oslo_vmware.api [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Task: {'id': task-3619243, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1718} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.589022] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.589200] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.589382] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.589555] env[70020]: INFO nova.compute.manager [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1236.589792] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.589986] env[70020]: DEBUG nova.compute.manager [-] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1236.590208] env[70020]: DEBUG nova.network.neutron [-] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1236.733919] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e7a72f4-3594-4768-84ca-e76428f6a3cb tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "9962b718-ca31-4f09-91f3-133dd68612ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.753s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.738016] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7c4359b5-dd06-4585-b0d3-74697e886d68 tempest-ServerRescueNegativeTestJSON-1808745105 tempest-ServerRescueNegativeTestJSON-1808745105-project-member] Lock "b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.819s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.760708] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098589} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.760957] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.762028] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.765047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.786775] env[70020]: DEBUG nova.network.neutron [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Successfully updated port: c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1236.963801] env[70020]: DEBUG nova.compute.manager [req-69baa6f4-2ed7-4260-89c2-ab7376480b9d req-37102877-2e9f-4a48-8d3a-6e11818b307a service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Received event network-vif-plugged-c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1236.964392] env[70020]: DEBUG oslo_concurrency.lockutils [req-69baa6f4-2ed7-4260-89c2-ab7376480b9d req-37102877-2e9f-4a48-8d3a-6e11818b307a service nova] Acquiring lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.964392] env[70020]: DEBUG oslo_concurrency.lockutils [req-69baa6f4-2ed7-4260-89c2-ab7376480b9d req-37102877-2e9f-4a48-8d3a-6e11818b307a service nova] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.964392] env[70020]: DEBUG oslo_concurrency.lockutils [req-69baa6f4-2ed7-4260-89c2-ab7376480b9d req-37102877-2e9f-4a48-8d3a-6e11818b307a service nova] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.964812] env[70020]: DEBUG nova.compute.manager [req-69baa6f4-2ed7-4260-89c2-ab7376480b9d req-37102877-2e9f-4a48-8d3a-6e11818b307a service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] No waiting events found dispatching network-vif-plugged-c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1236.964974] env[70020]: WARNING nova.compute.manager 
[req-69baa6f4-2ed7-4260-89c2-ab7376480b9d req-37102877-2e9f-4a48-8d3a-6e11818b307a service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Received unexpected event network-vif-plugged-c963937e-c9c9-452b-a0d2-b2a4314681dd for instance with vm_state building and task_state block_device_mapping. [ 1237.016974] env[70020]: INFO nova.compute.manager [-] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Took 1.26 seconds to deallocate network for instance. [ 1237.295290] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.295290] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.295290] env[70020]: DEBUG nova.network.neutron [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1237.356260] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67548f8c-a610-4fd0-abaa-cd29c64c2e0b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.364884] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f4999d-416a-4c50-8a7d-6c894c8f09ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.403744] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66aec7c0-07c0-4faf-84b0-3f0f06c64d11 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.414092] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb68e6e-34cc-4b42-9e96-eb0a4d99f71b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.427157] env[70020]: DEBUG nova.compute.provider_tree [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.523329] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.812477] env[70020]: DEBUG nova.virt.hardware [None 
req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1237.812715] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.812907] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1237.813123] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.813310] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1237.813434] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1237.813614] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1237.813771] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1237.813932] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 
tempest-ServersListShow296Test-1523430024-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1237.816440] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1237.816440] env[70020]: DEBUG nova.virt.hardware [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1237.816440] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ac7f03-e6a1-41c6-9564-9f2076a4be70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.824461] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0013efdf-7222-4851-92a7-4ab04bbe00b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.831093] env[70020]: DEBUG nova.network.neutron [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1237.844305] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance VIF info [] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1237.851431] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1237.851793] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1237.852106] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c35b9e7d-d47e-4f0c-9e39-eb18ff934003 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.871783] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.872015] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.876727] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1237.876727] env[70020]: value = "task-3619246" [ 1237.876727] env[70020]: _type = "Task" [ 1237.876727] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.885415] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619246, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.930254] env[70020]: DEBUG nova.scheduler.client.report [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.005213] env[70020]: DEBUG nova.network.neutron [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.021167] env[70020]: DEBUG nova.network.neutron [-] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.066632] env[70020]: DEBUG nova.compute.manager [req-40753adf-5645-4130-bbd6-0d9c962f8846 req-9a98aaa7-d2e1-42db-a98d-e4d438bb4f95 service nova] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Received event network-vif-deleted-f17543b2-5415-422a-b395-b7aa575543a0 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1238.228113] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1238.228648] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1238.228861] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.229189] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1238.229424] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.229579] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1238.229729] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1238.229934] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1238.230106] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1238.230276] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1238.230441] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1238.230627] env[70020]: DEBUG nova.virt.hardware [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1238.231727] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0bff6e-e26d-4f38-ad40-854c965f8d87 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.240546] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcd1c99-7d8d-428c-aeba-48c03af0017c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.374833] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1238.386593] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619246, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.437522] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.239s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.438076] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1238.440802] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.918s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.441027] env[70020]: DEBUG nova.objects.instance [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lazy-loading 'resources' on Instance uuid 115a8a58-d3ce-4778-9bc7-c75d0007b499 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1238.508161] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.508501] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Instance network_info: |[{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1238.509598] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:7d:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c963937e-c9c9-452b-a0d2-b2a4314681dd', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.516421] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.517010] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1238.517263] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8aa15903-5bbb-4165-8f77-26ca1e1cc6f8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.531848] env[70020]: INFO nova.compute.manager [-] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Took 1.94 seconds to deallocate network for instance. [ 1238.539374] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.539374] env[70020]: value = "task-3619247" [ 1238.539374] env[70020]: _type = "Task" [ 1238.539374] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.547399] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619247, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.890389] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619246, 'name': CreateVM_Task, 'duration_secs': 0.834842} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.890389] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1238.890667] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.890826] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.891157] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1238.891412] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23d3894e-cedb-491c-b3a9-85bff1fe3ad9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.898268] env[70020]: DEBUG oslo_vmware.api [None 
req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1238.898268] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]529d26e4-75e5-15d7-a6fd-45dfbb8a2fd3" [ 1238.898268] env[70020]: _type = "Task" [ 1238.898268] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.902898] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.906290] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d26e4-75e5-15d7-a6fd-45dfbb8a2fd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.944645] env[70020]: DEBUG nova.compute.utils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1238.950126] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1238.950420] env[70020]: DEBUG nova.network.neutron [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1238.998340] env[70020]: DEBUG nova.policy [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1239.004958] env[70020]: DEBUG nova.compute.manager [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Received event network-changed-c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.005474] env[70020]: DEBUG nova.compute.manager [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Refreshing instance network info cache due to event network-changed-c963937e-c9c9-452b-a0d2-b2a4314681dd. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1239.005865] env[70020]: DEBUG oslo_concurrency.lockutils [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] Acquiring lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.006038] env[70020]: DEBUG oslo_concurrency.lockutils [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] Acquired lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.006239] env[70020]: DEBUG nova.network.neutron [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Refreshing network info cache for port c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.041262] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.053059] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619247, 'name': CreateVM_Task, 'duration_secs': 0.426429} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.056017] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1239.056967] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '13cf2884-88e7-465c-b378-05b16f9bd2a1', 'guest_format': None, 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721830', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'name': 'volume-019bffbe-24bf-4b30-80fe-f387c8bba21b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd28f6dff-8f9f-41d4-87ae-0ff87327d042', 'attached_at': '', 'detached_at': '', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'serial': '019bffbe-24bf-4b30-80fe-f387c8bba21b'}, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=70020) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1239.057201] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Root volume attach. Driver type: vmdk {{(pid=70020) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1239.058092] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfd6a69-1862-4402-85d7-c94f984c52c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.066257] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c31550c-9ed5-476b-bd6a-3b15c4da9224 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.076109] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd79597-5fbb-4dbe-8ac7-de6c7306e09d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.085764] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ccc59738-a366-4279-88a4-418477f65b47 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.094593] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1239.094593] env[70020]: value = "task-3619248" [ 1239.094593] env[70020]: _type = "Task" [ 1239.094593] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.100078] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b0d721-22f2-46af-95ea-5dca994c397d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.107596] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.111065] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73949d45-4ca7-477d-8f5c-ddeadf30332c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.144722] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6308c761-46ce-4583-a863-e4df1037c215 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.153407] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc8615c-4135-4dfd-9a8f-8abf0ef37d4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.167140] env[70020]: DEBUG nova.compute.provider_tree [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.333328] env[70020]: DEBUG nova.network.neutron [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Successfully created port: 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1239.417768] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]529d26e4-75e5-15d7-a6fd-45dfbb8a2fd3, 'name': SearchDatastore_Task, 'duration_secs': 0.014446} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.418150] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.418964] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1239.419427] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.419579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.420052] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.420830] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93e44e6a-fb32-4127-831a-de8d3ee58430 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.430311] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.430767] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1239.431334] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39386927-d592-45a5-ad35-bc8b16a448a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.439510] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1239.439510] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52975201-b5b8-56fa-45af-f2f8f4a54a64" [ 1239.439510] env[70020]: _type = "Task" [ 1239.439510] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.448764] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52975201-b5b8-56fa-45af-f2f8f4a54a64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.450393] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1239.609293] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task} progress is 42%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.671222] env[70020]: DEBUG nova.scheduler.client.report [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1239.739351] env[70020]: DEBUG nova.network.neutron [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updated VIF entry in instance network info cache for port c963937e-c9c9-452b-a0d2-b2a4314681dd. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.739711] env[70020]: DEBUG nova.network.neutron [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.960579] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52975201-b5b8-56fa-45af-f2f8f4a54a64, 'name': SearchDatastore_Task, 'duration_secs': 0.0155} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.961805] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a50cedc7-810c-43ab-87ae-1537b612eb00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.968219] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1239.968219] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e4ebbb-d2f4-fd40-e417-1d71eff679db" [ 1239.968219] env[70020]: _type = "Task" [ 1239.968219] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.977795] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e4ebbb-d2f4-fd40-e417-1d71eff679db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.105222] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task} progress is 56%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.175868] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.178488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.275s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.179968] env[70020]: INFO nova.compute.claims [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1240.203009] env[70020]: INFO nova.scheduler.client.report [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Deleted allocations for instance 115a8a58-d3ce-4778-9bc7-c75d0007b499 [ 1240.242119] env[70020]: DEBUG oslo_concurrency.lockutils [req-470b0e1d-0874-459a-9f93-a0238438f63d req-6f8f9c84-76b6-4977-ad9f-0921b19c0b93 service nova] Releasing lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.463697] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1240.479624] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e4ebbb-d2f4-fd40-e417-1d71eff679db, 'name': SearchDatastore_Task, 'duration_secs': 0.017197} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.479871] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.480164] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1240.480433] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ea9f359-80e0-44d1-99f4-bde5bd5a9d4c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.490974] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1240.491244] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1240.491399] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1240.491580] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1240.491724] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1240.491869] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1240.492088] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1240.492250] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1240.492415] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1240.492575] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1240.492745] env[70020]: DEBUG nova.virt.hardware [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1240.493614] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faec2787-7069-4f6c-a637-bcb813f832f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.497562] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1240.497562] env[70020]: value = "task-3619249" [ 1240.497562] env[70020]: _type = "Task" [ 1240.497562] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.504590] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc89327e-d538-47fc-9ab3-605df8e2cd37 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.512174] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.606462] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task} progress is 71%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.712700] env[70020]: DEBUG oslo_concurrency.lockutils [None req-880b699d-86f4-41d1-b850-092f6590290c tempest-ServerDiskConfigTestJSON-536876121 tempest-ServerDiskConfigTestJSON-536876121-project-member] Lock "115a8a58-d3ce-4778-9bc7-c75d0007b499" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.599s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.788987] env[70020]: DEBUG nova.compute.manager [req-acc52dd7-398d-4058-8cbc-54ac49bb216c req-f2f3e02a-9774-4b09-a3c3-8e638bb03107 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-vif-plugged-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.789252] env[70020]: DEBUG oslo_concurrency.lockutils [req-acc52dd7-398d-4058-8cbc-54ac49bb216c req-f2f3e02a-9774-4b09-a3c3-8e638bb03107 service nova] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.789466] env[70020]: DEBUG oslo_concurrency.lockutils [req-acc52dd7-398d-4058-8cbc-54ac49bb216c req-f2f3e02a-9774-4b09-a3c3-8e638bb03107 service nova] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.789637] env[70020]: DEBUG oslo_concurrency.lockutils [req-acc52dd7-398d-4058-8cbc-54ac49bb216c req-f2f3e02a-9774-4b09-a3c3-8e638bb03107 service nova] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.789806] env[70020]: DEBUG nova.compute.manager [req-acc52dd7-398d-4058-8cbc-54ac49bb216c req-f2f3e02a-9774-4b09-a3c3-8e638bb03107 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] No waiting events found dispatching network-vif-plugged-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1240.789970] env[70020]: 
WARNING nova.compute.manager [req-acc52dd7-398d-4058-8cbc-54ac49bb216c req-f2f3e02a-9774-4b09-a3c3-8e638bb03107 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received unexpected event network-vif-plugged-0889717d-3194-4204-a46b-57e94fc35d6c for instance with vm_state building and task_state spawning. [ 1240.893734] env[70020]: DEBUG nova.network.neutron [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Successfully updated port: 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1241.011198] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619249, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.107467] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task} progress is 86%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.314287] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac11a3e-08a7-4c19-a4bd-bab21388acf8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.322335] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec20678-3ea5-434b-be9d-67419958b03b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.353403] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27343948-1570-4740-b358-8a7b81cc77c1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.360793] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae04db3-ced3-4478-b756-0bbe04fb72c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.373943] env[70020]: DEBUG nova.compute.provider_tree [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.396938] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.397185] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock 
"refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.397377] env[70020]: DEBUG nova.network.neutron [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1241.509043] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722497} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.509043] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1241.509043] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1241.509043] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49e86b82-49b9-4ca0-8384-29430ae5e4ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.517285] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1241.517285] env[70020]: value = "task-3619250" [ 1241.517285] env[70020]: _type = "Task" [ 1241.517285] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.524764] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619250, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.608278] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.877488] env[70020]: DEBUG nova.scheduler.client.report [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1241.942015] env[70020]: DEBUG nova.network.neutron [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1242.028530] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066202} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.028810] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1242.029596] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37345ef9-9cae-4e04-9f47-358496e5dd9f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.054120] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1242.054434] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33113070-f525-447e-9a6a-9a62ab28ed75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.076843] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1242.076843] env[70020]: value = "task-3619251" [ 1242.076843] env[70020]: _type = "Task" [ 1242.076843] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.085233] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619251, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.109960] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619248, 'name': RelocateVM_Task, 'duration_secs': 2.989563} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.110478] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Volume attach. Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1242.110692] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721830', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'name': 'volume-019bffbe-24bf-4b30-80fe-f387c8bba21b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd28f6dff-8f9f-41d4-87ae-0ff87327d042', 'attached_at': '', 'detached_at': '', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'serial': '019bffbe-24bf-4b30-80fe-f387c8bba21b'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1242.111483] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b33c8a-2e4c-45cd-8d5f-ec681547286c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.127879] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb172e57-20b0-4171-b117-70562da00028 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.151033] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-019bffbe-24bf-4b30-80fe-f387c8bba21b/volume-019bffbe-24bf-4b30-80fe-f387c8bba21b.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1242.151785] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d176363-b4c7-4d6c-afe6-57bdf89098a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.175085] env[70020]: DEBUG oslo_vmware.api [None 
req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1242.175085] env[70020]: value = "task-3619252" [ 1242.175085] env[70020]: _type = "Task" [ 1242.175085] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.183065] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619252, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.265075] env[70020]: DEBUG nova.network.neutron [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.382056] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.204s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.382605] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1242.385754] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.344s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.385754] env[70020]: DEBUG nova.objects.instance [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lazy-loading 'resources' on Instance uuid 1b25f8db-457e-4948-b9da-35e2fa5b897e {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.587190] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619251, 'name': ReconfigVM_Task, 'duration_secs': 0.278133} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.587474] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 602328f7-258a-44f5-802c-d580824beea0/602328f7-258a-44f5-802c-d580824beea0.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.588091] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a355b25-333a-45a3-9e59-f082b2620998 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.594666] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1242.594666] env[70020]: value = "task-3619253" [ 1242.594666] env[70020]: _type = "Task" [ 1242.594666] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.602069] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619253, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.687473] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619252, 'name': ReconfigVM_Task, 'duration_secs': 0.301617} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.690129] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-019bffbe-24bf-4b30-80fe-f387c8bba21b/volume-019bffbe-24bf-4b30-80fe-f387c8bba21b.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.695468] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02c19078-59ce-4db9-9164-604c2e5ce480 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.714688] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1242.714688] env[70020]: value = "task-3619254" [ 1242.714688] env[70020]: _type = "Task" [ 1242.714688] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.723791] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619254, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.767527] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.767862] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Instance network_info: |[{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1242.768298] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:64:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0889717d-3194-4204-a46b-57e94fc35d6c', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1242.775962] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1242.776206] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1242.776430] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b3b87b9-3ef9-4ef9-baf8-02b5a8a8a56b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.797261] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1242.797261] env[70020]: value = "task-3619255" [ 1242.797261] env[70020]: _type = "Task" [ 1242.797261] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.805206] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619255, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.820316] env[70020]: DEBUG nova.compute.manager [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1242.820549] env[70020]: DEBUG nova.compute.manager [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing instance network info cache due to event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1242.820805] env[70020]: DEBUG oslo_concurrency.lockutils [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.821254] env[70020]: DEBUG oslo_concurrency.lockutils [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.821354] env[70020]: DEBUG nova.network.neutron [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1242.889113] env[70020]: DEBUG nova.compute.utils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1242.896970] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1242.898994] env[70020]: DEBUG nova.network.neutron [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1242.942095] env[70020]: DEBUG nova.policy [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b543e081f574f1f85874775a734a0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e3eae740ef84ef88aef113ed4d6e57b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1243.036326] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8605995-48d2-49cc-a3c3-85a8bb8df647 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.044660] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92f73fd-2c60-4fd3-9dc9-e2bc94414b92 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.078129] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fc0c8531-d791-4a2e-b5f0-1b713e227a8d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.086044] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706f4d5d-e93a-4443-ad72-8dbd1a93bb19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.101748] env[70020]: DEBUG nova.compute.provider_tree [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.111185] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619253, 'name': Rename_Task, 'duration_secs': 0.140373} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.112016] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1243.112265] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a04f6cc5-2634-4d3d-86b9-16de45588310 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.118679] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1243.118679] env[70020]: value = "task-3619256" [ 1243.118679] env[70020]: _type = "Task" [ 1243.118679] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.126847] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.224719] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619254, 'name': ReconfigVM_Task, 'duration_secs': 0.135809} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.225487] env[70020]: DEBUG nova.network.neutron [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Successfully created port: f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1243.227432] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721830', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'name': 'volume-019bffbe-24bf-4b30-80fe-f387c8bba21b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd28f6dff-8f9f-41d4-87ae-0ff87327d042', 'attached_at': '', 'detached_at': '', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'serial': '019bffbe-24bf-4b30-80fe-f387c8bba21b'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1243.227956] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb08b80b-b9d9-4a23-9781-830145fae35c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.233904] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1243.233904] env[70020]: value = "task-3619257" [ 1243.233904] env[70020]: _type = "Task" [ 1243.233904] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.246468] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619257, 'name': Rename_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.313678] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619255, 'name': CreateVM_Task, 'duration_secs': 0.360277} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.314034] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1243.315308] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.315763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.316291] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1243.316719] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6259280-724a-46c2-bb5b-a2254915737c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.325058] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1243.325058] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]524e568e-db71-e190-74d8-6feac7f238a1" [ 1243.325058] env[70020]: _type = "Task" [ 1243.325058] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.336287] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]524e568e-db71-e190-74d8-6feac7f238a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010496} completed successfully. 
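
Editor's note: the Rename_Task, PowerOnVM_Task and SearchDatastore_Task records above all follow the same shape: the driver obtains a task handle, logs "Waiting for the task", then polls progress until the task reports success and a duration. The snippet below is a minimal, standalone illustration of that poll-until-done pattern; the fetch_task_info callable and TaskInfo type are invented stand-ins for the purpose of the sketch, not the oslo_vmware API.

import time
from dataclasses import dataclass

# Illustrative stand-in for a vCenter task-info result; not the real API.
@dataclass
class TaskInfo:
    state: str               # "running", "success", or "error"
    progress: int            # 0-100
    error: str | None = None

def wait_for_task(task_id, fetch_task_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, logging progress.

    fetch_task_info is an assumed callable returning a TaskInfo for task_id;
    in Nova this role is played by oslo_vmware's task polling helpers.
    """
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"Task {task_id} completed successfully in {duration:.3f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)

if __name__ == "__main__":
    # Fake task that advances 25% per poll, just to exercise the loop.
    calls = {"n": 0}
    def fake_fetch(task_id):
        calls["n"] += 1
        pct = min(100, calls["n"] * 25)
        return TaskInfo(state="success" if pct == 100 else "running", progress=pct)
    wait_for_task("task-0000001", fake_fetch, poll_interval=0.01)
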
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.336629] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.336898] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1243.337148] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.338017] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.338017] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1243.338017] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db8c3f0e-2edd-44e0-95ff-78aee6e2ed2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.345920] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1243.346146] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1243.347371] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-318efe02-c6e8-4f6e-a17f-5c30a7300cdc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.355052] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1243.355052] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52f9939d-d801-6c2b-141b-55a49fc71cfd" [ 1243.355052] env[70020]: _type = "Task" [ 1243.355052] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.366019] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f9939d-d801-6c2b-141b-55a49fc71cfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.396050] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1243.608017] env[70020]: DEBUG nova.scheduler.client.report [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1243.628797] env[70020]: DEBUG oslo_vmware.api [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619256, 'name': PowerOnVM_Task, 'duration_secs': 0.461381} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.628797] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1243.628982] env[70020]: DEBUG nova.compute.manager [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.629677] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0016d4b6-bb0c-45c1-84d4-793763427416 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.659655] env[70020]: DEBUG nova.network.neutron [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updated VIF entry in instance network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1243.660070] env[70020]: DEBUG nova.network.neutron [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.745128] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619257, 'name': Rename_Task, 'duration_secs': 0.136649} completed successfully. 
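
Editor's note: the neutron records above refresh the instance's network info cache for a single port and then rewrite the cached VIF entry (address, subnets, OVS interface id, and so on). As a rough mental model, the cache is a list of VIF dicts keyed by port id, and a targeted refresh replaces only the matching entry. The helper below illustrates that update-by-port-id step with plain dicts; it is an illustrative sketch, not Nova's instance info cache code.

def update_vif_entry(network_info, refreshed_vif):
    """Replace the cached VIF whose id matches, or append it if missing.

    network_info is a list of VIF dicts shaped like the log records above;
    this is an illustrative helper, not Nova's implementation.
    """
    for index, vif in enumerate(network_info):
        if vif["id"] == refreshed_vif["id"]:
            network_info[index] = refreshed_vif
            return network_info
    network_info.append(refreshed_vif)
    return network_info

# Example: refresh the entry for one port without touching the others.
cache = [
    {"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "active": False},
    {"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "active": True},
]
update_vif_entry(cache, {"id": "0889717d-3194-4204-a46b-57e94fc35d6c",
                         "address": "fa:16:3e:ed:64:03", "active": True})
assert cache[0]["active"] is True
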
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.745323] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1243.745575] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2afd71ed-50d3-47f6-a81f-2a39773c5611 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.752055] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1243.752055] env[70020]: value = "task-3619258" [ 1243.752055] env[70020]: _type = "Task" [ 1243.752055] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.761324] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619258, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.864416] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52f9939d-d801-6c2b-141b-55a49fc71cfd, 'name': SearchDatastore_Task, 'duration_secs': 0.011329} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.865255] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e401b34-b11c-4e96-98bb-39f076a01390 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.871521] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1243.871521] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522feef2-b34d-75bc-25c9-309baa65f372" [ 1243.871521] env[70020]: _type = "Task" [ 1243.871521] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.879604] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522feef2-b34d-75bc-25c9-309baa65f372, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.114579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.137429] env[70020]: INFO nova.scheduler.client.report [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Deleted allocations for instance 1b25f8db-457e-4948-b9da-35e2fa5b897e [ 1244.146921] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.147102] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.147176] env[70020]: DEBUG nova.objects.instance [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=70020) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1244.163113] env[70020]: DEBUG oslo_concurrency.lockutils [req-4afde39f-3197-4474-bca2-21b340434aa3 req-84cc5675-e5f0-4dc2-b1b5-2727f99bbac3 service nova] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.263091] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619258, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.383643] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]522feef2-b34d-75bc-25c9-309baa65f372, 'name': SearchDatastore_Task, 'duration_secs': 0.010612} completed successfully. 
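
Editor's note: each lockutils record above pairs an "Acquiring lock" line with "acquired ... waited N.NNNs" and later "released ... held N.NNNs", so lock wait and hold times can be read straight off the log. The context manager below reproduces that wait/hold accounting with a plain threading.Lock; it is a standalone sketch with invented names, not oslo_concurrency.lockutils.

import threading
import time
from contextlib import contextmanager

_locks = {}                        # name -> threading.Lock, created on first use
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, caller):
    """Acquire a named lock, reporting wait and hold times like the log above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.05)   # stand-in for the work done while the lock is held
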
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.383899] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.384190] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] e96aae17-5ae5-404b-bbe3-46777f7c34d2/e96aae17-5ae5-404b-bbe3-46777f7c34d2.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1244.384445] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac6c86a9-1d56-424f-ae53-3b73960ff5fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.390555] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1244.390555] env[70020]: value = "task-3619259" [ 1244.390555] env[70020]: _type = "Task" [ 1244.390555] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.398399] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.411839] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Start spawning the instance on the hypervisor. 
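
Editor's note: the CopyVirtualDisk_Task above copies the cached image VMDK from [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk to [datastore1] <instance-uuid>/<instance-uuid>.vmdk. The helper below only shows how such "[datastore] path" strings can be assembled; the directory layout is taken from the log lines themselves, while the functions are illustrative rather than Nova's ds_util.

def datastore_path(datastore, *parts):
    """Join path components under a datastore, e.g. '[datastore1] a/b.vmdk'."""
    return f"[{datastore}] " + "/".join(parts)

def cached_image_vmdk(datastore, image_id, cache_dir="devstack-image-cache_base"):
    # Cached images live in <cache_dir>/<image_id>/<image_id>.vmdk (per the log).
    return datastore_path(datastore, cache_dir, image_id, f"{image_id}.vmdk")

def instance_root_vmdk(datastore, instance_uuid):
    # The instance root disk is copied to <uuid>/<uuid>.vmdk on the same datastore.
    return datastore_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")

src = cached_image_vmdk("datastore1", "c9cd83bf-fd12-4173-a067-f57d38f23556")
dst = instance_root_vmdk("datastore1", "e96aae17-5ae5-404b-bbe3-46777f7c34d2")
print(f"Copying Virtual Disk {src} to {dst}")
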
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1244.438259] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1244.438532] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1244.438680] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1244.438868] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None 
req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1244.440028] env[70020]: DEBUG nova.virt.hardware [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1244.440878] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1573ec8-2337-435b-8929-ee7c41644408 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.449047] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7c7f0f-bd99-4338-9736-dcd0bc6d222f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.495739] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "602328f7-258a-44f5-802c-d580824beea0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.496024] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "602328f7-258a-44f5-802c-d580824beea0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.496242] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "602328f7-258a-44f5-802c-d580824beea0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.496527] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "602328f7-258a-44f5-802c-d580824beea0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.497083] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 
tempest-ServersListShow296Test-1523430024-project-member] Lock "602328f7-258a-44f5-802c-d580824beea0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.498978] env[70020]: INFO nova.compute.manager [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Terminating instance [ 1244.648447] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0914f50d-69f9-4324-a198-a07653d027c1 tempest-AttachVolumeNegativeTest-680083968 tempest-AttachVolumeNegativeTest-680083968-project-member] Lock "1b25f8db-457e-4948-b9da-35e2fa5b897e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.720s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.765186] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619258, 'name': PowerOnVM_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.858187] env[70020]: DEBUG nova.compute.manager [req-2ee45486-7b26-4c76-a81d-ac8cec1a7441 req-62e29853-e2f8-4aab-aa66-5bff370ffd73 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Received event network-vif-plugged-f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.858456] env[70020]: DEBUG oslo_concurrency.lockutils [req-2ee45486-7b26-4c76-a81d-ac8cec1a7441 req-62e29853-e2f8-4aab-aa66-5bff370ffd73 service nova] Acquiring lock "845ea37a-9945-49cd-a1bd-3da91f4af16b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.859058] env[70020]: DEBUG oslo_concurrency.lockutils [req-2ee45486-7b26-4c76-a81d-ac8cec1a7441 req-62e29853-e2f8-4aab-aa66-5bff370ffd73 service nova] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.859360] env[70020]: DEBUG oslo_concurrency.lockutils [req-2ee45486-7b26-4c76-a81d-ac8cec1a7441 req-62e29853-e2f8-4aab-aa66-5bff370ffd73 service nova] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.859632] env[70020]: DEBUG nova.compute.manager [req-2ee45486-7b26-4c76-a81d-ac8cec1a7441 req-62e29853-e2f8-4aab-aa66-5bff370ffd73 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] No waiting events found dispatching network-vif-plugged-f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1244.859870] env[70020]: WARNING nova.compute.manager [req-2ee45486-7b26-4c76-a81d-ac8cec1a7441 req-62e29853-e2f8-4aab-aa66-5bff370ffd73 service nova] [instance: 
845ea37a-9945-49cd-a1bd-3da91f4af16b] Received unexpected event network-vif-plugged-f4583380-5208-4372-ab67-cc6b64a287d2 for instance with vm_state building and task_state spawning. [ 1244.900639] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619259, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.910039] env[70020]: DEBUG nova.network.neutron [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Successfully updated port: f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1245.002498] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "refresh_cache-602328f7-258a-44f5-802c-d580824beea0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.002756] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquired lock "refresh_cache-602328f7-258a-44f5-802c-d580824beea0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.002990] env[70020]: DEBUG nova.network.neutron [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.154315] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24cbc884-ed97-4597-a539-a02d489b4d2b tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.007s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.266322] env[70020]: DEBUG oslo_vmware.api [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619258, 'name': PowerOnVM_Task, 'duration_secs': 1.161219} completed successfully. 
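
Editor's note: the nova.virt.hardware records a few lines back enumerate candidate (sockets, cores, threads) splits for the flavor's single vCPU, filter them against the (here effectively unlimited) flavor and image maxima, and take the preferred candidate from the sorted list. The snippet below reproduces that enumerate-filter-sort idea for small vCPU counts; it is a simplification for illustration, not the actual _get_desirable_cpu_topologies code.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            yield sockets, cores, threads

def choose_topology(vcpus, prefer=(1, 1, 1)):
    """Pick the candidate closest to the preferred split (simplified ordering)."""
    candidates = sorted(possible_topologies(vcpus),
                        key=lambda t: sum(abs(a - b) for a, b in zip(t, prefer)))
    return candidates[0]

# For a 1-vCPU flavor the only candidate is (1, 1, 1), matching the log above.
print(list(possible_topologies(1)))   # [(1, 1, 1)]
print(choose_topology(1))             # (1, 1, 1)
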
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.266650] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1245.266978] env[70020]: INFO nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Took 7.04 seconds to spawn the instance on the hypervisor. [ 1245.267271] env[70020]: DEBUG nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.268382] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c843a1-c90b-4d6f-934d-32298e80fa7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.401408] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619259, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.413202] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.413351] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.413509] env[70020]: DEBUG nova.network.neutron [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.521261] env[70020]: DEBUG nova.network.neutron [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1245.576340] env[70020]: DEBUG nova.network.neutron [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.788916] env[70020]: INFO nova.compute.manager [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Took 13.53 seconds to build instance. [ 1245.902340] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619259, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.956354] env[70020]: DEBUG nova.network.neutron [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1246.079521] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Releasing lock "refresh_cache-602328f7-258a-44f5-802c-d580824beea0" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.080589] env[70020]: DEBUG nova.compute.manager [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1246.080589] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1246.082034] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd913893-7361-4ae3-b8d1-7229673bf843 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.092870] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.093224] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60cf9b7f-a9ea-4818-948a-e4b3c03a2c47 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.103451] env[70020]: DEBUG oslo_vmware.api [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1246.103451] env[70020]: value = "task-3619261" [ 1246.103451] env[70020]: _type = "Task" [ 1246.103451] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.112962] env[70020]: DEBUG oslo_vmware.api [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619261, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.133612] env[70020]: DEBUG nova.network.neutron [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updating instance_info_cache with network_info: [{"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4583380-52", "ovs_interfaceid": "f4583380-5208-4372-ab67-cc6b64a287d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.290701] env[70020]: DEBUG oslo_concurrency.lockutils [None req-aa494882-1fe6-4cf2-81a1-fa83c5c7ceac tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.042s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.303113] env[70020]: DEBUG nova.compute.manager [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Received event network-changed-a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1246.303113] env[70020]: DEBUG nova.compute.manager [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Refreshing instance network info cache due to event network-changed-a59ccbd4-85b3-4a98-8407-29d65fea21f5. 
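
Editor's note: the external_instance_event records show Neutron notifying Nova about port events. Nova pops a matching waiter for the (instance, event) pair if one was registered, otherwise it logs the event as unexpected (as with network-vif-plugged earlier) or triggers a targeted network info cache refresh (as with network-changed here). A minimal registry capturing that pop-or-warn behaviour is sketched below; the class and method names are illustrative, not Nova's InstanceEvents API.

import threading

class InstanceEvents:
    """Track per-instance events that some spawn/attach path is waiting on."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        """Dispatch an incoming event to its waiter, or report it as unexpected."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return False
        waiter.set()
        return True

events = InstanceEvents()
# No one registered for this port's vif-plugged event, so it is "unexpected".
events.pop("845ea37a-9945-49cd-a1bd-3da91f4af16b",
           "network-vif-plugged-f4583380-5208-4372-ab67-cc6b64a287d2")
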
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1246.303113] env[70020]: DEBUG oslo_concurrency.lockutils [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] Acquiring lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.303113] env[70020]: DEBUG oslo_concurrency.lockutils [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] Acquired lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.303113] env[70020]: DEBUG nova.network.neutron [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Refreshing network info cache for port a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.404036] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619259, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.527783} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.405038] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] e96aae17-5ae5-404b-bbe3-46777f7c34d2/e96aae17-5ae5-404b-bbe3-46777f7c34d2.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1246.405038] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1246.405038] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9855dd54-e7cf-4dd8-aa36-6fb5d480bd7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.412233] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1246.412233] env[70020]: value = "task-3619262" [ 1246.412233] env[70020]: _type = "Task" [ 1246.412233] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.420604] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619262, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.613269] env[70020]: DEBUG oslo_vmware.api [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619261, 'name': PowerOffVM_Task, 'duration_secs': 0.284467} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.613547] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1246.613715] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1246.613964] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cabf9be4-0d65-4166-a718-7738367409e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.636823] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.636823] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Instance network_info: |[{"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4583380-52", "ovs_interfaceid": "f4583380-5208-4372-ab67-cc6b64a287d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1246.637951] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 
tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:f4:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b21ab10-d886-4453-9472-9e11fb3c450d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4583380-5208-4372-ab67-cc6b64a287d2', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1246.644703] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1246.646035] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1246.646324] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1246.646509] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1246.646702] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Deleting the datastore file [datastore1] 602328f7-258a-44f5-802c-d580824beea0 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1246.646940] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e11eb3e-e829-450f-b3b5-ecb0f40d1aff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.661062] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6adb57c-dd57-4e3a-a079-a4f426987f94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.668043] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1246.668043] env[70020]: value = "task-3619264" [ 1246.668043] env[70020]: _type = "Task" [ 1246.668043] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.669249] env[70020]: DEBUG oslo_vmware.api [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for the task: (returnval){ [ 1246.669249] env[70020]: value = "task-3619265" [ 1246.669249] env[70020]: _type = "Task" [ 1246.669249] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.681072] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619264, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.683960] env[70020]: DEBUG oslo_vmware.api [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.892184] env[70020]: DEBUG nova.compute.manager [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Received event network-changed-f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1246.892445] env[70020]: DEBUG nova.compute.manager [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Refreshing instance network info cache due to event network-changed-f4583380-5208-4372-ab67-cc6b64a287d2. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1246.892578] env[70020]: DEBUG oslo_concurrency.lockutils [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] Acquiring lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.892717] env[70020]: DEBUG oslo_concurrency.lockutils [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] Acquired lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.892877] env[70020]: DEBUG nova.network.neutron [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Refreshing network info cache for port f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.922888] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062398} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.923291] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1246.924300] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e760e2df-828a-45bd-a13d-259ec411e7de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.951106] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] e96aae17-5ae5-404b-bbe3-46777f7c34d2/e96aae17-5ae5-404b-bbe3-46777f7c34d2.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1246.951473] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a5e13c2-0339-4aee-b74b-6f235a8c001b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.973584] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1246.973584] env[70020]: value = "task-3619266" [ 1246.973584] env[70020]: _type = "Task" [ 1246.973584] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.984102] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619266, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.041718] env[70020]: DEBUG nova.network.neutron [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updated VIF entry in instance network info cache for port a59ccbd4-85b3-4a98-8407-29d65fea21f5. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.042261] env[70020]: DEBUG nova.network.neutron [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updating instance_info_cache with network_info: [{"id": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "address": "fa:16:3e:ae:ed:1c", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa59ccbd4-85", "ovs_interfaceid": "a59ccbd4-85b3-4a98-8407-29d65fea21f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.183440] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619264, 'name': CreateVM_Task, 'duration_secs': 0.370469} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.186166] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1247.186771] env[70020]: DEBUG oslo_vmware.api [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Task: {'id': task-3619265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130788} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.187403] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.187568] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.187890] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1247.188163] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.188331] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1247.188491] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1247.188654] env[70020]: INFO nova.compute.manager [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] [instance: 602328f7-258a-44f5-802c-d580824beea0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1247.188866] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1247.189064] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6a49027-7e88-446e-82e1-15b7dab99878 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.190565] env[70020]: DEBUG nova.compute.manager [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1247.190665] env[70020]: DEBUG nova.network.neutron [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1247.194722] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1247.194722] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c23bdb-de08-22c4-507f-a5ef1bf5e1fb" [ 1247.194722] env[70020]: _type = "Task" [ 1247.194722] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.202678] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c23bdb-de08-22c4-507f-a5ef1bf5e1fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.205777] env[70020]: DEBUG nova.network.neutron [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1247.484488] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619266, 'name': ReconfigVM_Task, 'duration_secs': 0.499965} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.484765] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Reconfigured VM instance instance-0000007a to attach disk [datastore1] e96aae17-5ae5-404b-bbe3-46777f7c34d2/e96aae17-5ae5-404b-bbe3-46777f7c34d2.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1247.485405] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a26d1356-3f44-44a8-9713-887f7b3a34b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.491037] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1247.491037] env[70020]: value = "task-3619268" [ 1247.491037] env[70020]: _type = "Task" [ 1247.491037] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.499488] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619268, 'name': Rename_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.545973] env[70020]: DEBUG oslo_concurrency.lockutils [req-fa1d7874-36af-4643-a9f9-9d18f5dd3b63 req-a385bd02-dd40-4e64-8175-23fccd4fc569 service nova] Releasing lock "refresh_cache-96966bf2-a9ff-48ba-be3f-c767e7b6eedd" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.587680] env[70020]: DEBUG nova.network.neutron [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updated VIF entry in instance network info cache for port f4583380-5208-4372-ab67-cc6b64a287d2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.588066] env[70020]: DEBUG nova.network.neutron [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updating instance_info_cache with network_info: [{"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4583380-52", "ovs_interfaceid": "f4583380-5208-4372-ab67-cc6b64a287d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.705620] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c23bdb-de08-22c4-507f-a5ef1bf5e1fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010508} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.705917] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.706194] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1247.706472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.706650] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.706881] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1247.707190] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20beea45-1e99-455f-aafd-bcbc42ccacf4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.709202] env[70020]: DEBUG nova.network.neutron [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.717132] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1247.717304] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1247.718561] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-782dfd8f-1a4e-4660-952b-592a65544dbe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.724847] env[70020]: DEBUG nova.compute.manager [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1247.729445] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1247.729445] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b9a86c-1d4e-7a1a-1c7e-838deacdf004" [ 1247.729445] env[70020]: _type = "Task" [ 1247.729445] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.737416] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b9a86c-1d4e-7a1a-1c7e-838deacdf004, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.001367] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619268, 'name': Rename_Task, 'duration_secs': 0.164394} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.001718] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1248.001718] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a3ded4f-b2d7-4b9d-9e39-773957042fd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.009502] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1248.009502] env[70020]: value = "task-3619269" [ 1248.009502] env[70020]: _type = "Task" [ 1248.009502] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.016979] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619269, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.091292] env[70020]: DEBUG oslo_concurrency.lockutils [req-96e5154f-3d13-4183-a228-a3604acdf901 req-28ebc7b1-7fe5-46ab-aff2-14a5f480e28e service nova] Releasing lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.211616] env[70020]: INFO nova.compute.manager [-] [instance: 602328f7-258a-44f5-802c-d580824beea0] Took 1.02 seconds to deallocate network for instance. [ 1248.243733] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b9a86c-1d4e-7a1a-1c7e-838deacdf004, 'name': SearchDatastore_Task, 'duration_secs': 0.009488} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.244747] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-920650ed-e4c6-42d0-bcf4-02ac98b83a78 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.248124] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.249055] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.256079] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1248.256079] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5299f7e5-fa2d-fece-7487-1cb2c0588eea" [ 1248.256079] env[70020]: _type = "Task" [ 1248.256079] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.269357] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5299f7e5-fa2d-fece-7487-1cb2c0588eea, 'name': SearchDatastore_Task, 'duration_secs': 0.010225} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.269643] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.269928] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 845ea37a-9945-49cd-a1bd-3da91f4af16b/845ea37a-9945-49cd-a1bd-3da91f4af16b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1248.270217] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19d303e0-0d54-4f1d-b18c-51cc49ea9a03 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.276871] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1248.276871] env[70020]: value = "task-3619270" [ 1248.276871] env[70020]: _type = "Task" [ 1248.276871] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.285254] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.330316] env[70020]: DEBUG nova.compute.manager [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Received event network-changed-c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1248.330436] env[70020]: DEBUG nova.compute.manager [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Refreshing instance network info cache due to event network-changed-c963937e-c9c9-452b-a0d2-b2a4314681dd. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1248.330873] env[70020]: DEBUG oslo_concurrency.lockutils [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] Acquiring lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.331041] env[70020]: DEBUG oslo_concurrency.lockutils [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] Acquired lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.331210] env[70020]: DEBUG nova.network.neutron [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Refreshing network info cache for port c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1248.522214] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619269, 'name': PowerOnVM_Task} progress is 66%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.719223] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.754120] env[70020]: INFO nova.compute.claims [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1248.787308] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619270, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469301} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.787505] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 845ea37a-9945-49cd-a1bd-3da91f4af16b/845ea37a-9945-49cd-a1bd-3da91f4af16b.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.787747] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1248.788038] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ebac0da-d1c4-4c63-be05-0f84ad20a24d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.794123] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1248.794123] env[70020]: value = "task-3619271" [ 1248.794123] env[70020]: _type = "Task" [ 1248.794123] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.802425] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619271, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.020683] env[70020]: DEBUG oslo_vmware.api [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619269, 'name': PowerOnVM_Task, 'duration_secs': 0.735609} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.020985] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1249.021215] env[70020]: INFO nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Took 8.56 seconds to spawn the instance on the hypervisor. 
[ 1249.021398] env[70020]: DEBUG nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1249.022415] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf813d-0b70-4e17-80b0-50f6affc0979 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.080236] env[70020]: DEBUG nova.network.neutron [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updated VIF entry in instance network info cache for port c963937e-c9c9-452b-a0d2-b2a4314681dd. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1249.080612] env[70020]: DEBUG nova.network.neutron [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.260420] env[70020]: INFO nova.compute.resource_tracker [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating resource usage from migration 19e094b8-1dbe-457c-8db7-7f0a15bb22db [ 1249.306129] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072129} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.306386] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1249.307218] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb785a5-58bd-4902-87ef-f1fe69429bac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.331753] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 845ea37a-9945-49cd-a1bd-3da91f4af16b/845ea37a-9945-49cd-a1bd-3da91f4af16b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.334359] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e4082cf-93c8-4417-9044-9a572d78c44a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.356045] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1249.356045] env[70020]: value = "task-3619272" [ 1249.356045] env[70020]: _type = "Task" [ 1249.356045] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.365894] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619272, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.400791] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61ffea5-733c-45c1-9259-ca54c7269109 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.408113] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb525a-2443-4fca-a828-7989135bff63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.438985] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a773eb16-95d8-4fef-9f1e-db339ccd9780 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.446449] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6e3246-834b-4007-afdc-1219b36aaad1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.460206] env[70020]: DEBUG nova.compute.provider_tree [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.540353] env[70020]: INFO nova.compute.manager [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Took 15.46 seconds to build instance. [ 1249.583763] env[70020]: DEBUG oslo_concurrency.lockutils [req-6f04ff71-69f1-455e-a274-8292801db650 req-cb916ea2-1501-492f-a633-9d04f05824b0 service nova] Releasing lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.867059] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619272, 'name': ReconfigVM_Task, 'duration_secs': 0.284717} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.867059] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 845ea37a-9945-49cd-a1bd-3da91f4af16b/845ea37a-9945-49cd-a1bd-3da91f4af16b.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.867406] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c4d813b-94c1-45e3-b524-2c28ed3b657b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.873097] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1249.873097] env[70020]: value = "task-3619274" [ 1249.873097] env[70020]: _type = "Task" [ 1249.873097] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.880397] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619274, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.963735] env[70020]: DEBUG nova.scheduler.client.report [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1250.041951] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef389d5e-7116-4c23-8542-64022bb2877a tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.973s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.384485] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619274, 'name': Rename_Task, 'duration_secs': 0.13626} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.384782] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.385051] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b6a6a5e-b34b-4fad-b1fc-ec076f570505 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.391644] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1250.391644] env[70020]: value = "task-3619275" [ 1250.391644] env[70020]: _type = "Task" [ 1250.391644] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.399883] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.469037] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.220s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.469364] env[70020]: INFO nova.compute.manager [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Migrating [ 1250.476550] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.757s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.476678] env[70020]: DEBUG nova.objects.instance [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lazy-loading 'resources' on Instance uuid 602328f7-258a-44f5-802c-d580824beea0 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.886753] env[70020]: DEBUG nova.compute.manager [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.886874] env[70020]: DEBUG nova.compute.manager [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] [instance: 
e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing instance network info cache due to event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1250.887117] env[70020]: DEBUG oslo_concurrency.lockutils [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.887269] env[70020]: DEBUG oslo_concurrency.lockutils [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.887427] env[70020]: DEBUG nova.network.neutron [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1250.902342] env[70020]: DEBUG oslo_vmware.api [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619275, 'name': PowerOnVM_Task, 'duration_secs': 0.410279} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.902573] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1250.903100] env[70020]: INFO nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Took 6.49 seconds to spawn the instance on the hypervisor. 
[ 1250.903100] env[70020]: DEBUG nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.903700] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1199077-0d15-435d-a1b0-b765e7e09be1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.987943] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.988119] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.988667] env[70020]: DEBUG nova.network.neutron [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.113768] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a3bff7-11e2-450c-ae83-296b9e220b7c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.122013] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53420841-2094-4763-b1b7-d837ae4f334a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.158047] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f18736-9c98-474b-8008-968ed9c6362c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.165259] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d586dd9f-e1c3-45c4-b510-641f329a5fdd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.179189] env[70020]: DEBUG nova.compute.provider_tree [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.426037] env[70020]: INFO nova.compute.manager [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Took 12.54 seconds to build instance. 
[ 1251.613295] env[70020]: DEBUG nova.network.neutron [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updated VIF entry in instance network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1251.613724] env[70020]: DEBUG nova.network.neutron [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.682516] env[70020]: DEBUG nova.scheduler.client.report [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1251.705420] env[70020]: DEBUG nova.network.neutron [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 
4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.803568] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.803732] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.930451] env[70020]: DEBUG oslo_concurrency.lockutils [None req-24e5f61a-85e8-45e4-a145-f7bdb531f040 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.058s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.118991] env[70020]: DEBUG oslo_concurrency.lockutils [req-34ba2079-b608-4768-b4d5-710866f41d34 req-5c46172d-d1a2-4745-ad34-177f549253b6 service nova] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.187564] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.210962] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.215840] env[70020]: INFO nova.scheduler.client.report [None req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 
tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Deleted allocations for instance 602328f7-258a-44f5-802c-d580824beea0 [ 1252.305913] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1252.312895] env[70020]: DEBUG nova.compute.manager [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Received event network-changed-f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1252.313243] env[70020]: DEBUG nova.compute.manager [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Refreshing instance network info cache due to event network-changed-f4583380-5208-4372-ab67-cc6b64a287d2. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1252.315193] env[70020]: DEBUG oslo_concurrency.lockutils [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] Acquiring lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.315193] env[70020]: DEBUG oslo_concurrency.lockutils [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] Acquired lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.315193] env[70020]: DEBUG nova.network.neutron [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Refreshing network info cache for port f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1252.594138] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.594380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.594620] env[70020]: INFO nova.compute.manager [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Shelving [ 1252.723312] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-ef3dd78a-8810-4031-b7f0-9ab59ebe8f65 tempest-ServersListShow296Test-1523430024 tempest-ServersListShow296Test-1523430024-project-member] Lock "602328f7-258a-44f5-802c-d580824beea0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.227s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.827773] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.828078] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.829624] env[70020]: INFO nova.compute.claims [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1253.165396] env[70020]: DEBUG nova.network.neutron [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updated VIF entry in instance network info cache for port f4583380-5208-4372-ab67-cc6b64a287d2. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1253.165794] env[70020]: DEBUG nova.network.neutron [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updating instance_info_cache with network_info: [{"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4583380-52", "ovs_interfaceid": "f4583380-5208-4372-ab67-cc6b64a287d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.608344] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.608344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce08f8a5-0f0d-4338-832b-a6cd472139cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.614110] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1253.614110] env[70020]: value = "task-3619276" [ 1253.614110] env[70020]: _type = "Task" [ 1253.614110] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.622644] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619276, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.668370] env[70020]: DEBUG oslo_concurrency.lockutils [req-50b4f7d7-f870-49c9-a0a6-a10663339a05 req-f44fa31f-3e08-4a3a-ba70-11128bd9c384 service nova] Releasing lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.733159] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329a3366-1353-48bd-8f78-6906f0d1b432 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.752269] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1253.980035] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9449c6f-d68f-49da-bd6f-3e53c144f57c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.988045] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d46d18c-f1b2-427a-8022-07005f5772f0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.021188] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e8ef9c-897e-447a-ade1-1d6c44818ba1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.029961] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcc9ed4-183f-44d3-984c-2c2c52fdbaf4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.044561] env[70020]: DEBUG nova.compute.provider_tree [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.125720] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619276, 'name': PowerOffVM_Task, 'duration_secs': 0.359024} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.127184] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.127368] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7235161a-7bfe-4fca-b425-b85d050db53c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.145958] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962b6f81-24a6-45cd-8814-967bf218836f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.258868] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.259215] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e517cf7c-4d22-46b9-b8b1-04b4c85403f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.266304] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1254.266304] env[70020]: value = "task-3619277" [ 1254.266304] env[70020]: _type = "Task" [ 1254.266304] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.274418] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619277, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.547666] env[70020]: DEBUG nova.scheduler.client.report [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1254.663220] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1254.663540] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d43ece62-64f7-415c-9db5-5496c820e649 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.670883] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1254.670883] env[70020]: value = "task-3619278" [ 1254.670883] env[70020]: _type = "Task" [ 1254.670883] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.681266] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619278, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.776022] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619277, 'name': PowerOffVM_Task, 'duration_secs': 0.467706} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.776356] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.776547] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1255.053067] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.053067] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1255.181271] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619278, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.282597] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1255.282994] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1255.282994] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1255.283117] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1255.283248] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1255.283394] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1255.283597] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1255.283753] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1255.283910] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1255.284081] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1255.284302] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1255.291416] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0e54fff-e29a-49af-8a9e-2a7d8f58698b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.308084] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1255.308084] env[70020]: value = "task-3619279" [ 1255.308084] env[70020]: _type = "Task" [ 1255.308084] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.316114] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619279, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.558112] env[70020]: DEBUG nova.compute.utils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1255.559568] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Allocating IP information in the background. 
{{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1255.559743] env[70020]: DEBUG nova.network.neutron [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1255.606223] env[70020]: DEBUG nova.policy [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1255.682509] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619278, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.818608] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619279, 'name': ReconfigVM_Task, 'duration_secs': 0.157733} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.819796] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1255.895087] env[70020]: DEBUG nova.network.neutron [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Successfully created port: 8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1256.062613] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1256.184544] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619278, 'name': CreateSnapshot_Task, 'duration_secs': 1.348508} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.185453] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1256.186025] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd663fb1-61a7-4503-ac54-dbf110700e6c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.325343] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1256.325764] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1256.325764] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1256.325963] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1256.326487] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1256.326487] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1256.326487] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1256.326684] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1256.326847] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1256.326987] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1256.327227] env[70020]: DEBUG nova.virt.hardware [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1256.332509] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1256.332791] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f021c0e6-6c50-4d11-a07e-71abda6250d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.350922] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1256.350922] env[70020]: value = "task-3619280" [ 1256.350922] env[70020]: _type = "Task" [ 1256.350922] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.359999] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619280, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.705848] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1256.707028] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2552f91a-ec55-48d0-941d-98ade7493e09 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.715573] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1256.715573] env[70020]: value = "task-3619281" [ 1256.715573] env[70020]: _type = "Task" [ 1256.715573] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.723700] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619281, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.860984] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619280, 'name': ReconfigVM_Task, 'duration_secs': 0.183561} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.861298] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1256.862163] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d58643-3e1f-409f-90f6-c3f5a166722a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.885741] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-019bffbe-24bf-4b30-80fe-f387c8bba21b/volume-019bffbe-24bf-4b30-80fe-f387c8bba21b.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.886051] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc5b642d-8776-474a-b163-102f80aaae3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.905556] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1256.905556] env[70020]: value = "task-3619282" [ 1256.905556] env[70020]: _type = "Task" [ 1256.905556] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.916863] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619282, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.073508] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Start spawning the instance on the hypervisor. 
{{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1257.100141] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1257.100392] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1257.100548] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1257.100729] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1257.100872] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1257.101031] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1257.101245] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1257.101400] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1257.101560] 
env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1257.101717] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1257.101884] env[70020]: DEBUG nova.virt.hardware [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1257.102752] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62c18f4-dd70-435d-81a7-109fbb5800ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.110705] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422a5db8-eeb7-4cb2-91dc-b56b93fe6963 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.226946] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619281, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.312135] env[70020]: DEBUG nova.compute.manager [req-17a2a738-c24f-4939-8d55-2ebb95cafbac req-f0b68d6a-05b1-4524-9684-eb84047c1955 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-vif-plugged-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1257.312356] env[70020]: DEBUG oslo_concurrency.lockutils [req-17a2a738-c24f-4939-8d55-2ebb95cafbac req-f0b68d6a-05b1-4524-9684-eb84047c1955 service nova] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.312615] env[70020]: DEBUG oslo_concurrency.lockutils [req-17a2a738-c24f-4939-8d55-2ebb95cafbac req-f0b68d6a-05b1-4524-9684-eb84047c1955 service nova] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.312743] env[70020]: DEBUG oslo_concurrency.lockutils [req-17a2a738-c24f-4939-8d55-2ebb95cafbac req-f0b68d6a-05b1-4524-9684-eb84047c1955 service nova] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.312882] env[70020]: DEBUG nova.compute.manager [req-17a2a738-c24f-4939-8d55-2ebb95cafbac req-f0b68d6a-05b1-4524-9684-eb84047c1955 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] No waiting events found dispatching network-vif-plugged-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1257.313077] env[70020]: WARNING nova.compute.manager [req-17a2a738-c24f-4939-8d55-2ebb95cafbac req-f0b68d6a-05b1-4524-9684-eb84047c1955 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received unexpected event network-vif-plugged-8b1a9cab-123c-45b9-9703-a6e46606f140 for instance with vm_state building and task_state spawning. [ 1257.400951] env[70020]: DEBUG nova.network.neutron [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Successfully updated port: 8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1257.416169] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619282, 'name': ReconfigVM_Task, 'duration_secs': 0.48471} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.417009] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-019bffbe-24bf-4b30-80fe-f387c8bba21b/volume-019bffbe-24bf-4b30-80fe-f387c8bba21b.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.417297] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1257.726282] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619281, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.904073] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.904073] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.904073] env[70020]: DEBUG nova.network.neutron [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1257.923600] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffa3515-ea72-40ce-8cce-484d5073c2ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.945536] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b14883-0606-403e-9700-6321f6fbf893 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.963759] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1258.227806] 
env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619281, 'name': CloneVM_Task} progress is 95%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.441374] env[70020]: DEBUG nova.network.neutron [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1258.577498] env[70020]: DEBUG nova.network.neutron [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.728637] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619281, 'name': CloneVM_Task, 'duration_secs': 1.992199} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.729124] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Created linked-clone VM from snapshot [ 1258.729870] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c160dcb5-f3df-45df-a366-56aa7da0a3da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.737663] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Uploading image 1824125a-6f02-476b-8b5c-9899bfa470d7 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1258.761175] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1258.761175] env[70020]: value = "vm-721841" [ 1258.761175] env[70020]: _type = "VirtualMachine" [ 1258.761175] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1258.761441] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-edead8c0-d567-4cd2-9be3-9e108e796e2b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.767422] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease: (returnval){ [ 1258.767422] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52628929-3d30-6023-aa96-b5a534190154" [ 1258.767422] env[70020]: _type = "HttpNfcLease" [ 1258.767422] env[70020]: } obtained for exporting VM: (result){ [ 1258.767422] env[70020]: value = "vm-721841" [ 1258.767422] env[70020]: _type = "VirtualMachine" [ 1258.767422] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1258.767733] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the lease: (returnval){ [ 1258.767733] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52628929-3d30-6023-aa96-b5a534190154" [ 1258.767733] env[70020]: _type = "HttpNfcLease" [ 1258.767733] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1258.773840] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1258.773840] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52628929-3d30-6023-aa96-b5a534190154" [ 1258.773840] env[70020]: _type = "HttpNfcLease" [ 1258.773840] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1259.080393] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1259.080644] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Instance network_info: |[{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1259.081157] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:81:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b1a9cab-123c-45b9-9703-a6e46606f140', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1259.089131] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1259.089938] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1259.090230] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4634bea3-f162-4528-a7e0-c556d9da97a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.110381] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1259.110381] env[70020]: value = "task-3619284" [ 1259.110381] env[70020]: _type = "Task" [ 1259.110381] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.118310] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619284, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.275943] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1259.275943] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52628929-3d30-6023-aa96-b5a534190154" [ 1259.275943] env[70020]: _type = "HttpNfcLease" [ 1259.275943] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1259.276246] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1259.276246] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52628929-3d30-6023-aa96-b5a534190154" [ 1259.276246] env[70020]: _type = "HttpNfcLease" [ 1259.276246] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1259.277059] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49625eb-1730-4f87-b345-6f7ce1167734 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.284348] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523970c5-44c4-600a-d8cd-60589db811bb/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1259.284521] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523970c5-44c4-600a-d8cd-60589db811bb/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1259.368228] env[70020]: DEBUG nova.compute.manager [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1259.368433] env[70020]: DEBUG nova.compute.manager [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing instance network info cache due to event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1259.368650] env[70020]: DEBUG oslo_concurrency.lockutils [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.368791] env[70020]: DEBUG oslo_concurrency.lockutils [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.368950] env[70020]: DEBUG nova.network.neutron [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1259.419162] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-73e1c905-bb2b-409a-ab90-b57c9d905dd4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.607283] env[70020]: DEBUG nova.network.neutron [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Port c963937e-c9c9-452b-a0d2-b2a4314681dd binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1259.620646] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619284, 'name': CreateVM_Task, 'duration_secs': 0.321153} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.622219] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1259.624819] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.625496] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.626011] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1259.626353] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff79fd3b-5b24-4cb8-8b0d-8e1a83b95f9e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.631559] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1259.631559] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a671c5-c773-ef87-752c-0808da14a7da" [ 1259.631559] env[70020]: _type = "Task" [ 1259.631559] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.640453] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a671c5-c773-ef87-752c-0808da14a7da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.090552] env[70020]: DEBUG nova.network.neutron [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updated VIF entry in instance network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1260.091271] env[70020]: DEBUG nova.network.neutron [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.142222] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a671c5-c773-ef87-752c-0808da14a7da, 'name': SearchDatastore_Task, 'duration_secs': 0.011658} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.142579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.142861] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1260.143148] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.143348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.143579] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1260.144286] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84e727ac-5517-4781-94e0-f3f669bd2128 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.152526] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1260.152660] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1260.153410] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8677907-5999-4465-8376-45ec05704e7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.159348] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1260.159348] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52d0d7aa-462e-a38e-7508-ad677b9a6dc3" [ 1260.159348] env[70020]: _type = "Task" [ 1260.159348] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.166883] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d0d7aa-462e-a38e-7508-ad677b9a6dc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.595131] env[70020]: DEBUG oslo_concurrency.lockutils [req-6476a31a-63e7-4d12-9f8f-7caff3244238 req-f3287fac-a1d4-464b-87b4-3d2049451412 service nova] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.631202] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.633292] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.633292] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.670990] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52d0d7aa-462e-a38e-7508-ad677b9a6dc3, 'name': SearchDatastore_Task, 'duration_secs': 0.015054} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.671857] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd35125b-7e9c-4059-99d5-74ea7c20b370 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.677217] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1260.677217] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52843609-2d6f-8137-680d-65383d0a0218" [ 1260.677217] env[70020]: _type = "Task" [ 1260.677217] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.685512] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52843609-2d6f-8137-680d-65383d0a0218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.187710] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52843609-2d6f-8137-680d-65383d0a0218, 'name': SearchDatastore_Task, 'duration_secs': 0.028164} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.187999] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.188288] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 4b73ae75-c403-4268-8eab-4d6c32aef950/4b73ae75-c403-4268-8eab-4d6c32aef950.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1261.188639] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-063639ba-7513-4ee6-9141-3c4a27b5d428 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.195807] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1261.195807] env[70020]: value = "task-3619285" [ 1261.195807] env[70020]: _type = "Task" [ 1261.195807] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.204576] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.670991] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.671343] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.671516] env[70020]: DEBUG nova.network.neutron [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.707135] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619285, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.207276] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539394} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.209931] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore2] 4b73ae75-c403-4268-8eab-4d6c32aef950/4b73ae75-c403-4268-8eab-4d6c32aef950.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1262.210175] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1262.210436] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12f30682-9490-41af-8ec5-6733d42fa4e2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.218351] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1262.218351] env[70020]: value = "task-3619286" [ 1262.218351] env[70020]: _type = "Task" [ 1262.218351] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.226214] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619286, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.405455] env[70020]: DEBUG nova.network.neutron [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.728297] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071056} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.728770] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1262.729165] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df5c996-a3e8-4d2e-a9a0-51e5580af684 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.750652] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 4b73ae75-c403-4268-8eab-4d6c32aef950/4b73ae75-c403-4268-8eab-4d6c32aef950.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1262.750901] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c1bf23e-2dab-492e-b551-656560cf6122 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.770767] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1262.770767] env[70020]: value = "task-3619287" [ 1262.770767] env[70020]: _type = "Task" [ 1262.770767] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.778570] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619287, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.908518] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.135502] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.284046] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619287, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.417405] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a653508-675c-4f56-b74c-2026a9dbf1ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.424308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9481d78-bd0e-4cfb-a69d-54d9cb1421e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.639809] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.639809] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.639809] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.639809] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1263.640689] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f52367-82a3-4150-b6be-fa44cf9a4557 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.648985] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea19b57d-1f87-4a27-912c-23627680f29c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.663232] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0574b090-c37f-4ec6-87ab-8c99b6da7f38 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.670310] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c62c10-4031-4de1-9027-011519177908 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.699887] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179528MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1263.700110] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.700362] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.782454] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619287, 'name': ReconfigVM_Task, 'duration_secs': 0.597716} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.782749] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 4b73ae75-c403-4268-8eab-4d6c32aef950/4b73ae75-c403-4268-8eab-4d6c32aef950.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1263.783412] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4d65c8c-b901-40d5-a2a6-f8d1a668a877 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.790481] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1263.790481] env[70020]: value = "task-3619288" [ 1263.790481] env[70020]: _type = "Task" [ 1263.790481] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.798883] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619288, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.302678] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619288, 'name': Rename_Task, 'duration_secs': 0.283196} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.302678] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1264.302916] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67cff309-a385-4d0e-bcad-dd7593a8b29a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.309602] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1264.309602] env[70020]: value = "task-3619289" [ 1264.309602] env[70020]: _type = "Task" [ 1264.309602] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.317681] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619289, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.530713] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620942a6-31c2-4fdc-a0c1-b6ae9dbaac12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.549939] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c0cb3f-5737-4e92-adcd-4f264ea7b9a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.557027] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1264.710459] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Applying migration context for instance d28f6dff-8f9f-41d4-87ae-0ff87327d042 as it has an incoming, in-progress migration 19e094b8-1dbe-457c-8db7-7f0a15bb22db. Migration status is post-migrating {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1264.711587] env[70020]: INFO nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating resource usage from migration 19e094b8-1dbe-457c-8db7-7f0a15bb22db [ 1264.729694] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.729954] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance c9a3fb0f-95bf-4b51-ac06-99415acfa9cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.730201] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.730416] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance e96aae17-5ae5-404b-bbe3-46777f7c34d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.730645] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 845ea37a-9945-49cd-a1bd-3da91f4af16b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.730845] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Migration 19e094b8-1dbe-457c-8db7-7f0a15bb22db is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1264.731051] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance d28f6dff-8f9f-41d4-87ae-0ff87327d042 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.731249] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 4b73ae75-c403-4268-8eab-4d6c32aef950 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1264.731527] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1264.731737] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1264.820092] env[70020]: DEBUG oslo_vmware.api [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619289, 'name': PowerOnVM_Task, 'duration_secs': 0.48906} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.822688] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1264.822894] env[70020]: INFO nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Took 7.75 seconds to spawn the instance on the hypervisor. 
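The spawn above is driven by a sequence of vSphere tasks (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each started via the SOAP API and then polled by oslo_vmware.api.wait_for_task, which emits the "progress is N%" and "completed successfully" DEBUG lines. A minimal sketch of that invoke-then-poll pattern, assuming an already-created oslo_vmware.api.VMwareAPISession in `session` and a valid managed-object reference in `vm_ref` (both placeholders, not values taken from this log):

def power_on(session, vm_ref):
    # Start the vSphere task; invoke_api(module, method, *args) proxies the SOAP call
    # that shows up above as "Invoking VirtualMachine.PowerOnVM_Task with opID=...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() repeatedly reads TaskInfo until the task succeeds or fails,
    # producing the "Task: {...} progress is N%" polling entries seen in this log,
    # and raises on error so callers only proceed on success.
    return session.wait_for_task(task)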
[ 1264.823493] env[70020]: DEBUG nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1264.824153] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190fa038-921e-4c22-a583-39fca098f036 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.858138] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318707c0-a02a-41cd-8786-f8ad9367dd4e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.866932] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefe1c73-388d-428f-b210-c16acad5bcd3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.899887] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e37588-8c4e-460e-a6e2-eb204acab602 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.907761] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6898cb4b-ecfd-4393-ba57-c36bf7d697d7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.923707] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.063595] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1265.064381] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2517e3ee-9acd-4865-9c9c-9fe8f76eee35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.072528] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1265.072528] env[70020]: value = "task-3619290" [ 1265.072528] env[70020]: _type = "Task" [ 1265.072528] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.080542] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619290, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.342513] env[70020]: INFO nova.compute.manager [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Took 12.53 seconds to build instance. [ 1265.426764] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1265.583825] env[70020]: DEBUG oslo_vmware.api [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619290, 'name': PowerOnVM_Task, 'duration_secs': 0.470432} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.584317] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1265.584374] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b4c9b3fc-a134-4173-9d9a-76e1e763a12e tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance 'd28f6dff-8f9f-41d4-87ae-0ff87327d042' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1265.844372] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d8b17d75-4743-448d-b7d3-b7af85d484a8 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.040s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.932144] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1265.932510] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.232s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.119212] env[70020]: DEBUG nova.compute.manager [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] [instance: 
e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1266.119423] env[70020]: DEBUG nova.compute.manager [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing instance network info cache due to event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1266.119632] env[70020]: DEBUG oslo_concurrency.lockutils [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.119762] env[70020]: DEBUG oslo_concurrency.lockutils [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1266.119915] env[70020]: DEBUG nova.network.neutron [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1266.818170] env[70020]: DEBUG nova.network.neutron [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updated VIF entry in instance network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1266.818595] env[70020]: DEBUG nova.network.neutron [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.932390] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.932780] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.933258] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.933631] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.933952] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.934506] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1267.136393] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.321192] env[70020]: DEBUG oslo_concurrency.lockutils [req-708deaec-84f5-413a-8bda-b322bc99761a req-420dd5ac-1c2b-4ecf-99b0-9aacfd7c980d service nova] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1267.751171] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523970c5-44c4-600a-d8cd-60589db811bb/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1267.752144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcdb103-826d-443e-bbc5-c5816b3af7e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.758547] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523970c5-44c4-600a-d8cd-60589db811bb/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1267.758711] env[70020]: ERROR oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523970c5-44c4-600a-d8cd-60589db811bb/disk-0.vmdk due to incomplete transfer. [ 1267.758940] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0028ff20-1602-4fd1-ab5f-e4825fa3cf2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.766558] env[70020]: DEBUG oslo_vmware.rw_handles [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523970c5-44c4-600a-d8cd-60589db811bb/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1267.766751] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Uploaded image 1824125a-6f02-476b-8b5c-9899bfa470d7 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1267.769154] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1267.769446] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-50a9e30c-2ff5-4dd2-bf87-53eeda933c32 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.775618] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1267.775618] env[70020]: value = "task-3619291" [ 1267.775618] env[70020]: _type = "Task" [ 1267.775618] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.782975] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619291, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.787587] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.787811] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.788059] env[70020]: DEBUG nova.compute.manager [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Going to confirm migration 8 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1268.151630] env[70020]: DEBUG nova.compute.manager [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1268.151872] env[70020]: DEBUG nova.compute.manager [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing instance network info cache due to event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1268.152021] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.152287] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.152469] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1268.286371] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619291, 'name': Destroy_Task, 'duration_secs': 0.331482} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.286563] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Destroyed the VM [ 1268.287203] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1268.287203] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7af5eba9-216e-44f1-ad39-a725661a4871 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.295992] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1268.295992] env[70020]: value = "task-3619292" [ 1268.295992] env[70020]: _type = "Task" [ 1268.295992] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.304270] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619292, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.354135] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.354409] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquired lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.354644] env[70020]: DEBUG nova.network.neutron [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1268.354921] env[70020]: DEBUG nova.objects.instance [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'info_cache' on Instance uuid d28f6dff-8f9f-41d4-87ae-0ff87327d042 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.808479] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619292, 'name': RemoveSnapshot_Task, 'duration_secs': 0.373853} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.808479] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1268.808479] env[70020]: DEBUG nova.compute.manager [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1268.808671] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e64e13b-9dff-4af1-8bc1-d32910d17748 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.829667] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updated VIF entry in instance network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1268.830013] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.319731] env[70020]: INFO nova.compute.manager [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Shelve offloading [ 1269.332628] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.332872] env[70020]: DEBUG nova.compute.manager [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1269.333051] env[70020]: DEBUG nova.compute.manager [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing instance network info cache due to event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1269.333260] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.333400] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.333558] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1269.553327] env[70020]: DEBUG nova.network.neutron [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [{"id": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "address": "fa:16:3e:d7:7d:d0", "network": {"id": "b566519a-edda-4c57-92ca-a702e586c638", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-790170115-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74b060ffb3ac4ecd95dcd85d4744dc2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc963937e-c9", "ovs_interfaceid": "c963937e-c9c9-452b-a0d2-b2a4314681dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.823414] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1269.824103] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d8f2ed9-c9a3-4c26-b101-dd3b40c47d94 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.831084] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 
tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1269.831084] env[70020]: value = "task-3619293" [ 1269.831084] env[70020]: _type = "Task" [ 1269.831084] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.840225] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619293, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.012916] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updated VIF entry in instance network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1270.013299] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.056327] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Releasing lock "refresh_cache-d28f6dff-8f9f-41d4-87ae-0ff87327d042" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.056580] env[70020]: DEBUG nova.objects.instance [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'migration_context' on Instance uuid d28f6dff-8f9f-41d4-87ae-0ff87327d042 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.341344] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 
tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1270.341709] env[70020]: DEBUG nova.compute.manager [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1270.342302] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd6bce9-2a9b-4165-817f-40610223ac59 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.347711] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.347868] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.348047] env[70020]: DEBUG nova.network.neutron [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.516183] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.516459] env[70020]: DEBUG nova.compute.manager [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1270.516623] env[70020]: DEBUG nova.compute.manager [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing instance network info cache due to event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1270.516831] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.517043] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.517229] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1270.559090] env[70020]: DEBUG nova.objects.base [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1270.560019] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce68c444-1821-41bf-9fd5-6f52b6b01ae6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.579802] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1e95857-1354-486e-8977-d2b9c31d826a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.585462] env[70020]: DEBUG oslo_vmware.api [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1270.585462] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52adaa9e-c35f-348c-0c23-e28f48b5c0f1" [ 1270.585462] env[70020]: _type = "Task" [ 1270.585462] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.592681] env[70020]: DEBUG oslo_vmware.api [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52adaa9e-c35f-348c-0c23-e28f48b5c0f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.035174] env[70020]: DEBUG nova.network.neutron [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.096721] env[70020]: DEBUG oslo_vmware.api [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52adaa9e-c35f-348c-0c23-e28f48b5c0f1, 'name': SearchDatastore_Task, 'duration_secs': 0.007144} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.097019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.097275] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.135405] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.228515] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updated VIF entry in instance network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1271.228882] env[70020]: DEBUG nova.network.neutron [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.537922] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.719016] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca6dbb9-1ee3-4b01-b5e9-1c25e2b3a6db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.724994] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab2d67a-6aa9-46ae-911d-fda6ea2c254d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.731084] env[70020]: DEBUG oslo_concurrency.lockutils [req-1c62fc6a-ab45-4705-810a-8c5ad93e575e req-b8b6d5cf-2f98-4582-896b-600b1257444a service nova] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.756846] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d267d10b-09d6-4e06-b4bc-3541217589f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.764888] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc8785c-32e9-4ce9-a17f-369fc4c3219a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.778745] env[70020]: DEBUG nova.compute.provider_tree [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.804167] env[70020]: DEBUG nova.compute.manager [req-5c763c4b-f6fa-4950-bb3a-f41ae316128b req-e1fb7fea-4469-4dc8-9a7d-363a734daa60 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-vif-unplugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.804167] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c763c4b-f6fa-4950-bb3a-f41ae316128b req-e1fb7fea-4469-4dc8-9a7d-363a734daa60 service nova] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.804167] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c763c4b-f6fa-4950-bb3a-f41ae316128b req-e1fb7fea-4469-4dc8-9a7d-363a734daa60 service nova] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.804167] env[70020]: DEBUG oslo_concurrency.lockutils [req-5c763c4b-f6fa-4950-bb3a-f41ae316128b req-e1fb7fea-4469-4dc8-9a7d-363a734daa60 service nova] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.804167] env[70020]: DEBUG nova.compute.manager 
[req-5c763c4b-f6fa-4950-bb3a-f41ae316128b req-e1fb7fea-4469-4dc8-9a7d-363a734daa60 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] No waiting events found dispatching network-vif-unplugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1271.804167] env[70020]: WARNING nova.compute.manager [req-5c763c4b-f6fa-4950-bb3a-f41ae316128b req-e1fb7fea-4469-4dc8-9a7d-363a734daa60 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received unexpected event network-vif-unplugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef for instance with vm_state shelved and task_state shelving_offloading. [ 1271.896138] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1271.897144] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e529576-8daf-4148-adcb-69f55fc98370 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.904554] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1271.904802] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-419084c0-7188-422e-bc8a-ffd5bfe22ab4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.975604] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1271.975815] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1271.975996] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleting the datastore file [datastore1] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1271.976277] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e934545-a1e5-40c7-b895-1255a4e4efe2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.983604] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] 
Waiting for the task: (returnval){ [ 1271.983604] env[70020]: value = "task-3619295" [ 1271.983604] env[70020]: _type = "Task" [ 1271.983604] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.990826] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.282705] env[70020]: DEBUG nova.scheduler.client.report [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1272.493816] env[70020]: DEBUG oslo_vmware.api [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126173} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.494197] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1272.494489] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1272.494740] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1272.515164] env[70020]: INFO nova.scheduler.client.report [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted allocations for instance 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 [ 1273.019989] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.130656] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.292572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.195s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.295375] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.276s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.295611] env[70020]: DEBUG nova.objects.instance [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'resources' on Instance uuid 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1273.800836] env[70020]: DEBUG nova.objects.instance [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'numa_topology' on Instance uuid 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1273.830415] env[70020]: DEBUG nova.compute.manager [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1273.830415] env[70020]: DEBUG nova.compute.manager [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing instance network info cache due to event network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1273.831038] env[70020]: DEBUG oslo_concurrency.lockutils [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.831038] env[70020]: DEBUG oslo_concurrency.lockutils [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.831038] env[70020]: DEBUG nova.network.neutron [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.844780] env[70020]: INFO nova.scheduler.client.report [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted allocation for migration 19e094b8-1dbe-457c-8db7-7f0a15bb22db [ 1274.157891] env[70020]: INFO nova.compute.manager [None req-3e9a3c46-60f1-40c6-bfae-658577b8daee tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Get console output [ 1274.158239] env[70020]: WARNING nova.virt.vmwareapi.driver [None req-3e9a3c46-60f1-40c6-bfae-658577b8daee tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] The console log is missing. 
Check your VSPC configuration [ 1274.234641] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.304544] env[70020]: DEBUG nova.objects.base [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Object Instance<399d55b7-2a79-4849-89b6-ff8d1c0d33e1> lazy-loaded attributes: resources,numa_topology {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1274.350099] env[70020]: DEBUG oslo_concurrency.lockutils [None req-99c53ca5-4986-4cf1-ab03-eb6ba989e5d8 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.562s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.397662] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750a21ef-2941-4b84-84fd-44f0dc872838 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.408517] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b04821-ce30-48b9-a42a-48a757ba1e64 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.442591] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ccd66b-cd15-46b2-a455-5830545c888c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.449783] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521409ca-4325-47fd-8fdc-1f4e7453618d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.462729] env[70020]: DEBUG nova.compute.provider_tree [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.547927] env[70020]: DEBUG nova.network.neutron [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updated VIF entry in instance network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1274.548318] env[70020]: DEBUG nova.network.neutron [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.967814] env[70020]: DEBUG nova.scheduler.client.report [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1275.050535] env[70020]: DEBUG oslo_concurrency.lockutils [req-0977212a-1ec5-4fd6-91f6-706579f085e5 req-c5ab8573-72a9-4b2b-8715-139f0f87ce58 service nova] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.472868] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.177s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.979561] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3a4d1e32-e6b0-4dfd-aec9-7de8565f22be tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.385s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.980236] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 
tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.746s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.980454] env[70020]: INFO nova.compute.manager [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Unshelving [ 1276.993063] env[70020]: DEBUG nova.compute.utils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1277.496456] env[70020]: INFO nova.virt.block_device [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Booting with volume fd719fc4-e0af-4335-b966-f7758bcc701e at /dev/sdb [ 1277.531024] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f7609b4-a3f2-4fd7-85c7-682fbf5a19a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.539579] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d80aea-1543-46e9-8977-3c99657b4685 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.568131] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08cfe148-d8c4-4752-a69e-198b147c5e5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.576830] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0dbd18-86ff-4ef4-b46f-1a5da7452890 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.602995] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c293c54f-8e4b-4447-9b79-e7dfed59c981 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.609291] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0415be-e6a4-4ee2-86f1-2ec9264c4c80 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.621907] env[70020]: DEBUG nova.virt.block_device [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating existing volume attachment record: c03d15bb-a0f3-4c7b-b1df-017e467d1a12 {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1283.215783] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.216082] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.216280] env[70020]: DEBUG nova.objects.instance [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'pci_requests' on Instance uuid 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.720600] env[70020]: DEBUG nova.objects.instance [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'numa_topology' on Instance uuid 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1284.223532] env[70020]: INFO nova.compute.claims [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1285.316670] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f2b1f0-043a-4f59-8e62-eaa5c2a0608e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.324240] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bec2067-e51e-4de6-9fce-e52de0089bfa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.353957] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f2c21e-ba0d-48e3-968d-8fc12141acc5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.360771] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c91fda0-2f95-4e08-b3a6-1aef5f444dcb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.373963] env[70020]: DEBUG nova.compute.provider_tree [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.877019] env[70020]: DEBUG nova.scheduler.client.report [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1286.382573] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.166s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.419186] env[70020]: INFO nova.network.neutron [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1287.803419] env[70020]: DEBUG nova.compute.manager [req-b821c7d9-73f4-4996-bfca-bda4b99a4b2f req-ca2cafa4-1688-4ed7-9c0a-134c725cdb0f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1287.803675] env[70020]: DEBUG oslo_concurrency.lockutils [req-b821c7d9-73f4-4996-bfca-bda4b99a4b2f req-ca2cafa4-1688-4ed7-9c0a-134c725cdb0f service nova] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.803827] env[70020]: DEBUG oslo_concurrency.lockutils [req-b821c7d9-73f4-4996-bfca-bda4b99a4b2f req-ca2cafa4-1688-4ed7-9c0a-134c725cdb0f service nova] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.803988] env[70020]: DEBUG oslo_concurrency.lockutils [req-b821c7d9-73f4-4996-bfca-bda4b99a4b2f req-ca2cafa4-1688-4ed7-9c0a-134c725cdb0f service nova] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.804606] env[70020]: DEBUG nova.compute.manager [req-b821c7d9-73f4-4996-bfca-bda4b99a4b2f req-ca2cafa4-1688-4ed7-9c0a-134c725cdb0f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] No waiting events found dispatching network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1287.804778] env[70020]: WARNING nova.compute.manager [req-b821c7d9-73f4-4996-bfca-bda4b99a4b2f req-ca2cafa4-1688-4ed7-9c0a-134c725cdb0f service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received unexpected event network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef for instance with vm_state shelved_offloaded and task_state spawning. 
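The two entries just above trace the external-event path during the unshelve: the network service reports network-vif-plugged for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef, the compute manager takes the per-instance "-events" lock, finds no registered waiter, and logs the "Received unexpected event" warning because the instance is still in vm_state shelved_offloaded with task_state spawning. The following is a minimal, hypothetical sketch of that dispatch pattern using only the Python standard library; the names (InstanceEvents, pop_event, external_instance_event) are simplified stand-ins and this is not Nova's actual implementation.

import logging
import threading

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("event-dispatch-sketch")


class InstanceEvents:
    """Simplified registry of per-instance events a task may wait on."""

    def __init__(self):
        self._waiters = {}             # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()  # stands in for the per-instance "-events" lock in the log

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest in an event and return an object to wait on."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Remove and return the waiter for this event, or None if nobody is waiting."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def external_instance_event(events, instance_uuid, event_name):
    """Dispatch an event reported by the network service."""
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the WARNING path in the log: nothing was waiting for this event.
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        LOG.debug("Dispatching %s to its waiter", event_name)
        waiter.set()


if __name__ == "__main__":
    events = InstanceEvents()
    # No waiter was registered for this port, so the unexpected-event branch runs,
    # analogous to the shelved_offloaded/spawning warning in the entries above.
    external_instance_event(
        events,
        "399d55b7-2a79-4849-89b6-ff8d1c0d33e1",
        "network-vif-plugged-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef",
    )

In the real service the waiter side is armed before the port update is sent to the network service, so a timely plugged event releases the spawning thread instead of tripping the warning branch shown here.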
[ 1287.886434] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.886620] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.886817] env[70020]: DEBUG nova.network.neutron [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1288.572775] env[70020]: DEBUG nova.network.neutron [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.793664] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.793909] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" acquired by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.794199] env[70020]: DEBUG nova.compute.manager [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1288.795111] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e00f8e-a1a6-4f9b-8094-c2cbbf17d3fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.802136] env[70020]: DEBUG nova.compute.manager [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1288.802699] env[70020]: DEBUG nova.objects.instance [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'flavor' on Instance uuid 845ea37a-9945-49cd-a1bd-3da91f4af16b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1289.074907] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1289.100692] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='71b5aa5e4b3661036c9f49ffc2620c0a',container_format='bare',created_at=2025-04-25T23:11:53Z,direct_url=,disk_format='vmdk',id=1824125a-6f02-476b-8b5c-9899bfa470d7,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1622705399-shelved',owner='11384e127368415d82f2e8a7e985b17e',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-04-25T23:12:09Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1289.100940] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1289.101114] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 
tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1289.101298] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1289.101443] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1289.101585] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1289.101783] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1289.101935] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1289.102464] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1289.102814] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1289.103051] env[70020]: DEBUG nova.virt.hardware [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1289.103881] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd56e62-196c-4194-bb7e-43893b7bdfdd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.111777] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbbe3d8-9935-461c-ba34-79b6d3a1833e {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.124654] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:73:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'af454577-0e89-41a3-a9f2-f39716f62fd5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3b2c85d-9fe6-403f-bc6d-d003d2a06aef', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1289.131916] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1289.132153] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1289.132345] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44343610-981c-4438-b7f4-f79c3aad55ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.150540] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1289.150540] env[70020]: value = "task-3619300" [ 1289.150540] env[70020]: _type = "Task" [ 1289.150540] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.157378] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619300, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.660593] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619300, 'name': CreateVM_Task, 'duration_secs': 0.275622} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.660776] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1289.661380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.661551] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.662010] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1289.662274] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2004404-1586-489c-b1e4-5d0af8941964 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.666570] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1289.666570] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52544a8b-554a-6d63-3386-ec0ee75c1f01" [ 1289.666570] env[70020]: _type = "Task" [ 1289.666570] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.676115] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52544a8b-554a-6d63-3386-ec0ee75c1f01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.810953] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1289.811322] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19be0c9e-de2c-465a-a2d2-3fe234ef06ce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.818933] env[70020]: DEBUG oslo_vmware.api [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1289.818933] env[70020]: value = "task-3619301" [ 1289.818933] env[70020]: _type = "Task" [ 1289.818933] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.826965] env[70020]: DEBUG oslo_vmware.api [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.829391] env[70020]: DEBUG nova.compute.manager [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1289.829595] env[70020]: DEBUG nova.compute.manager [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing instance network info cache due to event network-changed-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1289.829805] env[70020]: DEBUG oslo_concurrency.lockutils [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] Acquiring lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.829963] env[70020]: DEBUG oslo_concurrency.lockutils [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] Acquired lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.830168] env[70020]: DEBUG nova.network.neutron [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Refreshing network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.015605] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-e96aae17-5ae5-404b-bbe3-46777f7c34d2-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.015875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-e96aae17-5ae5-404b-bbe3-46777f7c34d2-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.016269] env[70020]: DEBUG nova.objects.instance [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'flavor' on Instance uuid e96aae17-5ae5-404b-bbe3-46777f7c34d2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1290.177010] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1290.177403] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Processing image 1824125a-6f02-476b-8b5c-9899bfa470d7 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1290.177532] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.177676] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.177852] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1290.178111] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7dfd2dd-8619-4c08-a4fe-495706fed7ec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.186518] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1290.186682] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1290.187393] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98585229-0dd4-4a1c-aaf0-7b392e8243f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.192395] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1290.192395] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52772f0e-2858-7b89-594f-68b2f2d37838" [ 1290.192395] env[70020]: _type = "Task" [ 1290.192395] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.200343] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52772f0e-2858-7b89-594f-68b2f2d37838, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.328645] env[70020]: DEBUG oslo_vmware.api [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619301, 'name': PowerOffVM_Task, 'duration_secs': 0.170261} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.328908] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1290.329134] env[70020]: DEBUG nova.compute.manager [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1290.329839] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7830784b-89b5-4bbf-b29c-b9a61f88d434 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.519421] env[70020]: DEBUG nova.network.neutron [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updated VIF entry in instance network info cache for port b3b2c85d-9fe6-403f-bc6d-d003d2a06aef. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.519796] env[70020]: DEBUG nova.network.neutron [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [{"id": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "address": "fa:16:3e:d5:73:92", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b2c85d-9f", "ovs_interfaceid": "b3b2c85d-9fe6-403f-bc6d-d003d2a06aef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.635661] env[70020]: DEBUG nova.objects.instance [None 
req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'pci_requests' on Instance uuid e96aae17-5ae5-404b-bbe3-46777f7c34d2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1290.703478] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1290.703727] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Fetch image to [datastore2] OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38/OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1290.703906] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Downloading stream optimized image 1824125a-6f02-476b-8b5c-9899bfa470d7 to [datastore2] OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38/OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38.vmdk on the data store datastore2 as vApp {{(pid=70020) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1290.704088] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Downloading image file data 1824125a-6f02-476b-8b5c-9899bfa470d7 to the ESX as VM named 'OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38' {{(pid=70020) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1290.769515] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1290.769515] env[70020]: value = "resgroup-9" [ 1290.769515] env[70020]: _type = "ResourcePool" [ 1290.769515] env[70020]: }. 
{{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1290.769749] env[70020]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cf5ce27f-8e3e-4348-b43c-950dd4adf9b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.790546] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease: (returnval){ [ 1290.790546] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5263264a-9154-c1be-4eaf-b0f0c638874f" [ 1290.790546] env[70020]: _type = "HttpNfcLease" [ 1290.790546] env[70020]: } obtained for vApp import into resource pool (val){ [ 1290.790546] env[70020]: value = "resgroup-9" [ 1290.790546] env[70020]: _type = "ResourcePool" [ 1290.790546] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1290.790822] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the lease: (returnval){ [ 1290.790822] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5263264a-9154-c1be-4eaf-b0f0c638874f" [ 1290.790822] env[70020]: _type = "HttpNfcLease" [ 1290.790822] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1290.796800] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1290.796800] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5263264a-9154-c1be-4eaf-b0f0c638874f" [ 1290.796800] env[70020]: _type = "HttpNfcLease" [ 1290.796800] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1290.842429] env[70020]: DEBUG oslo_concurrency.lockutils [None req-b9334bc6-b219-47a1-99cc-20c1f4020f94 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.024530] env[70020]: DEBUG oslo_concurrency.lockutils [req-c0f53ff8-f7fe-4b09-af3c-52e853cc2169 req-f7d068de-346e-4273-b764-9fae01e78992 service nova] Releasing lock "refresh_cache-399d55b7-2a79-4849-89b6-ff8d1c0d33e1" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.138148] env[70020]: DEBUG nova.objects.base [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1291.138377] env[70020]: DEBUG nova.network.neutron [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1291.186344] env[70020]: DEBUG nova.objects.instance [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'flavor' on Instance uuid 845ea37a-9945-49cd-a1bd-3da91f4af16b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.204621] env[70020]: DEBUG nova.policy [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1291.298577] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1291.298577] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5263264a-9154-c1be-4eaf-b0f0c638874f" [ 1291.298577] env[70020]: _type = "HttpNfcLease" [ 1291.298577] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1291.298873] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1291.298873] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5263264a-9154-c1be-4eaf-b0f0c638874f" [ 1291.298873] env[70020]: _type = "HttpNfcLease" [ 1291.298873] env[70020]: }. 
{{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1291.299575] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eee40f1-c3b5-42b1-b7f9-61c07faed061 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.306368] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d1c1de-925d-51a5-159d-448eb319454b/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1291.306535] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d1c1de-925d-51a5-159d-448eb319454b/disk-0.vmdk. {{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1291.370182] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5888ca2b-51c2-4cf4-96d0-3532b6b7b4bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.691582] env[70020]: DEBUG oslo_concurrency.lockutils [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.691814] env[70020]: DEBUG oslo_concurrency.lockutils [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.691944] env[70020]: DEBUG nova.network.neutron [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1291.692131] env[70020]: DEBUG nova.objects.instance [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'info_cache' on Instance uuid 845ea37a-9945-49cd-a1bd-3da91f4af16b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.195905] env[70020]: DEBUG nova.objects.base [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Object Instance<845ea37a-9945-49cd-a1bd-3da91f4af16b> lazy-loaded attributes: flavor,info_cache {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1292.492548] env[70020]: DEBUG oslo_vmware.rw_handles [None 
req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1292.492759] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d1c1de-925d-51a5-159d-448eb319454b/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1292.493798] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f35af49-70bc-42bf-967e-3c2c9011a8a0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.500945] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d1c1de-925d-51a5-159d-448eb319454b/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1292.501196] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d1c1de-925d-51a5-159d-448eb319454b/disk-0.vmdk. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1292.501508] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-80f8d8b8-5707-4edb-a208-f5de12f713eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.562736] env[70020]: DEBUG nova.compute.manager [req-d4d80ce2-8f85-48a1-8769-a25054585264 req-440cd578-b1fd-43ef-9d9f-7f4125edc530 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-vif-plugged-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1292.562970] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4d80ce2-8f85-48a1-8769-a25054585264 req-440cd578-b1fd-43ef-9d9f-7f4125edc530 service nova] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.563333] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4d80ce2-8f85-48a1-8769-a25054585264 req-440cd578-b1fd-43ef-9d9f-7f4125edc530 service nova] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.563440] env[70020]: DEBUG oslo_concurrency.lockutils [req-d4d80ce2-8f85-48a1-8769-a25054585264 req-440cd578-b1fd-43ef-9d9f-7f4125edc530 service nova] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.563502] env[70020]: DEBUG nova.compute.manager [req-d4d80ce2-8f85-48a1-8769-a25054585264 req-440cd578-b1fd-43ef-9d9f-7f4125edc530 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] No waiting events found dispatching network-vif-plugged-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1292.563664] env[70020]: WARNING nova.compute.manager [req-d4d80ce2-8f85-48a1-8769-a25054585264 req-440cd578-b1fd-43ef-9d9f-7f4125edc530 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received unexpected event network-vif-plugged-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae for instance with vm_state active and task_state None. [ 1292.642877] env[70020]: DEBUG nova.network.neutron [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Successfully updated port: b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1292.703833] env[70020]: DEBUG oslo_vmware.rw_handles [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d1c1de-925d-51a5-159d-448eb319454b/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1292.704082] env[70020]: INFO nova.virt.vmwareapi.images [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Downloaded image file data 1824125a-6f02-476b-8b5c-9899bfa470d7 [ 1292.704862] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6855fc8a-5ef2-4cd2-af68-fce3aa2fccce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.722331] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b36af180-ac12-4c21-a258-624d7a1abee8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.744718] env[70020]: INFO nova.virt.vmwareapi.images [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] The imported VM was unregistered [ 1292.747140] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1292.747407] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating directory with path [datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1292.747693] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5719bf33-189e-47af-9a45-693389dec873 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.758062] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created directory with path [datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1292.758241] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38/OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38.vmdk to [datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk. 
{{(pid=70020) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1292.758508] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-273f1617-0bda-43bb-80d9-0c6edce4fe36 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.767174] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1292.767174] env[70020]: value = "task-3619304" [ 1292.767174] env[70020]: _type = "Task" [ 1292.767174] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.774286] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.896696] env[70020]: DEBUG nova.network.neutron [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updating instance_info_cache with network_info: [{"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4583380-52", "ovs_interfaceid": "f4583380-5208-4372-ab67-cc6b64a287d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.145370] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.145667] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.145886] env[70020]: DEBUG nova.network.neutron [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1293.277978] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.400070] env[70020]: DEBUG oslo_concurrency.lockutils [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.681116] env[70020]: WARNING nova.network.neutron [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. ignoring it [ 1293.778023] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.971326] env[70020]: DEBUG nova.network.neutron [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "address": "fa:16:3e:fa:67:6b", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72ae1ea-1f", "ovs_interfaceid": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.278150] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.405237] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1294.405680] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a817138-edb0-43d7-abf2-bff5dc7305e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.413931] env[70020]: DEBUG oslo_vmware.api [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1294.413931] env[70020]: value = "task-3619305" [ 1294.413931] env[70020]: _type = "Task" [ 1294.413931] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.422820] env[70020]: DEBUG oslo_vmware.api [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.473901] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1294.474633] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.474804] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.475824] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3b94a1-dfea-4057-a877-a82dfac79167 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.493905] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1294.494176] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1294.494329] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1294.494513] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1294.494658] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1294.494886] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1294.495116] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1294.495281] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1294.495446] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1294.495606] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1294.495851] env[70020]: DEBUG nova.virt.hardware [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1294.502745] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Reconfiguring VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1294.503521] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-358758a1-3150-48d2-8f6d-2d1afbad098f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.521679] env[70020]: DEBUG oslo_vmware.api [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1294.521679] env[70020]: value = "task-3619306" [ 1294.521679] env[70020]: _type = "Task" [ 1294.521679] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.530450] env[70020]: DEBUG oslo_vmware.api [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619306, 'name': ReconfigVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.594481] env[70020]: DEBUG nova.compute.manager [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-changed-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1294.594626] env[70020]: DEBUG nova.compute.manager [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing instance network info cache due to event network-changed-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1294.594882] env[70020]: DEBUG oslo_concurrency.lockutils [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.595012] env[70020]: DEBUG oslo_concurrency.lockutils [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.595304] env[70020]: DEBUG nova.network.neutron [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing network info cache for port b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1294.784211] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.925040] env[70020]: DEBUG oslo_vmware.api [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619305, 'name': PowerOnVM_Task, 'duration_secs': 0.471894} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.925378] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1294.926096] env[70020]: DEBUG nova.compute.manager [None req-454a88bd-c253-48d2-974a-54b8eb78297d tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1294.926570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfc26f5-2e33-42e6-b20d-aa375dad77fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.031402] env[70020]: DEBUG oslo_vmware.api [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619306, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.278862] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.532796] env[70020]: DEBUG oslo_vmware.api [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619306, 'name': ReconfigVM_Task, 'duration_secs': 0.6659} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.533523] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.533854] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Reconfigured VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1295.649980] env[70020]: DEBUG nova.network.neutron [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updated VIF entry in instance network info cache for port b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1295.649980] env[70020]: DEBUG nova.network.neutron [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "address": "fa:16:3e:fa:67:6b", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72ae1ea-1f", "ovs_interfaceid": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.779502] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619304, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.778357} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.779810] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38/OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38.vmdk to [datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk. [ 1295.779996] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Cleaning up location [datastore2] OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1295.780174] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_67d2586c-a85b-4870-b0c4-6b14cc731a38 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1295.780415] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a00d99cb-b4ee-47ba-92ba-7087d1da1d1c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.787342] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1295.787342] env[70020]: value = "task-3619307" [ 1295.787342] env[70020]: _type = "Task" [ 1295.787342] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.794828] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619307, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.038873] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c43208bd-b2d8-4da5-be22-ed9f623e45d0 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-e96aae17-5ae5-404b-bbe3-46777f7c34d2-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.023s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.151417] env[70020]: DEBUG oslo_concurrency.lockutils [req-9124e8bd-91a1-4e0a-842d-c99986cefa74 req-c47cddb3-ff5d-48cf-b8af-506a7d7348e3 service nova] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.297651] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175265} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.298058] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1296.298179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.298495] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk to [datastore2] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1296.298786] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ad05c3b-16ad-4bd6-89cf-2d4a2f68441b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.305953] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the 
task: (returnval){ [ 1296.305953] env[70020]: value = "task-3619308" [ 1296.305953] env[70020]: _type = "Task" [ 1296.305953] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.314515] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.358174] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df782b7-4e04-4d76-b349-f1b03c7cb366 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.365047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Suspending the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1296.365344] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-744fb0b8-7adf-4d44-8dc8-b5c0fc3072a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.372088] env[70020]: DEBUG oslo_vmware.api [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1296.372088] env[70020]: value = "task-3619309" [ 1296.372088] env[70020]: _type = "Task" [ 1296.372088] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.379952] env[70020]: DEBUG oslo_vmware.api [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619309, 'name': SuspendVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.815669] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619308, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.882345] env[70020]: DEBUG oslo_vmware.api [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619309, 'name': SuspendVM_Task} progress is 70%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.318496] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619308, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.384082] env[70020]: DEBUG oslo_vmware.api [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619309, 'name': SuspendVM_Task, 'duration_secs': 0.720462} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.384304] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Suspended the VM {{(pid=70020) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1297.384490] env[70020]: DEBUG nova.compute.manager [None req-5b6fc181-a010-4eed-af0d-1e1b199a77fa tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1297.385421] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4000b1a0-db4c-41d5-9877-3fb041b0252c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.730561] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-e96aae17-5ae5-404b-bbe3-46777f7c34d2-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.730561] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-e96aae17-5ae5-404b-bbe3-46777f7c34d2-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.816892] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619308, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.233209] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.233372] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.234435] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67ae6ce-29dc-4f0a-b441-9bf19b9febe8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.253828] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccc9e3a-de3d-46dd-916e-e0ffe9de4695 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.280481] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Reconfiguring VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1298.280809] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dbb445d-b0f1-44f0-bcf3-fec8d6449db3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.300397] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1298.300397] env[70020]: value = "task-3619310" [ 1298.300397] env[70020]: _type = "Task" [ 1298.300397] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.312719] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.320410] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619308, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.704116] env[70020]: INFO nova.compute.manager [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Resuming [ 1298.704778] env[70020]: DEBUG nova.objects.instance [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'flavor' on Instance uuid 845ea37a-9945-49cd-a1bd-3da91f4af16b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1298.810704] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.820140] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619308, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.363837} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.820375] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/1824125a-6f02-476b-8b5c-9899bfa470d7/1824125a-6f02-476b-8b5c-9899bfa470d7.vmdk to [datastore2] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.821119] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485131fc-77b6-463e-b2f6-86d42cedb052 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.841656] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1298.841877] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-822cf493-41a3-46ec-a307-bcc026c2b7a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.860466] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1298.860466] env[70020]: value = "task-3619311" [ 1298.860466] env[70020]: _type = "Task" [ 1298.860466] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.867726] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.311669] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.370684] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619311, 'name': ReconfigVM_Task, 'duration_secs': 0.25384} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.371065] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1/399d55b7-2a79-4849-89b6-ff8d1c0d33e1.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.372071] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'device_name': '/dev/sda', 'boot_index': 0, 'disk_bus': None, 'encryption_format': None, 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_options': None, 'size': 0, 'device_type': 'disk', 'image_id': 'c9cd83bf-fd12-4173-a067-f57d38f23556'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'c03d15bb-a0f3-4c7b-b1df-017e467d1a12', 'guest_format': None, 'delete_on_termination': False, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721844', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'name': 'volume-fd719fc4-e0af-4335-b966-f7758bcc701e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '399d55b7-2a79-4849-89b6-ff8d1c0d33e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'serial': 'fd719fc4-e0af-4335-b966-f7758bcc701e'}, 'mount_device': '/dev/sdb', 'boot_index': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=70020) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1299.372278] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 
399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Volume attach. Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1299.372465] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721844', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'name': 'volume-fd719fc4-e0af-4335-b966-f7758bcc701e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '399d55b7-2a79-4849-89b6-ff8d1c0d33e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'serial': 'fd719fc4-e0af-4335-b966-f7758bcc701e'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1299.373317] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3586e80a-f69a-439f-957a-34e4abc94a63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.388460] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3373a25-e755-4a49-98b1-67f4610345b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.412095] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] volume-fd719fc4-e0af-4335-b966-f7758bcc701e/volume-fd719fc4-e0af-4335-b966-f7758bcc701e.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.412398] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b6eaa20-18e1-45ba-91f6-60d15a940607 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.429697] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1299.429697] env[70020]: value = "task-3619312" [ 1299.429697] env[70020]: _type = "Task" [ 1299.429697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.436908] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.810690] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.938980] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619312, 'name': ReconfigVM_Task, 'duration_secs': 0.274855} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.939265] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfigured VM instance instance-00000076 to attach disk [datastore2] volume-fd719fc4-e0af-4335-b966-f7758bcc701e/volume-fd719fc4-e0af-4335-b966-f7758bcc701e.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.944130] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3cf90cb-0a16-497b-9940-0936919444ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.958495] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1299.958495] env[70020]: value = "task-3619313" [ 1299.958495] env[70020]: _type = "Task" [ 1299.958495] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.966289] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619313, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.215244] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.215534] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquired lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.215651] env[70020]: DEBUG nova.network.neutron [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1300.311965] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.469174] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619313, 'name': ReconfigVM_Task, 'duration_secs': 0.143856} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.469174] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721844', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'name': 'volume-fd719fc4-e0af-4335-b966-f7758bcc701e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '399d55b7-2a79-4849-89b6-ff8d1c0d33e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'serial': 'fd719fc4-e0af-4335-b966-f7758bcc701e'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1300.469695] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-174d3ff5-d3ae-4b91-986a-8187cb2efb17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.476148] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1300.476148] env[70020]: value = "task-3619314" [ 1300.476148] env[70020]: _type = "Task" [ 1300.476148] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.484601] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619314, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.812145] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.912144] env[70020]: DEBUG nova.network.neutron [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updating instance_info_cache with network_info: [{"id": "f4583380-5208-4372-ab67-cc6b64a287d2", "address": "fa:16:3e:29:f4:ad", "network": {"id": "ba2c4d3c-4191-4de5-8533-90ca5dfc1dc0", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-599216784-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e3eae740ef84ef88aef113ed4d6e57b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b21ab10-d886-4453-9472-9e11fb3c450d", "external-id": "nsx-vlan-transportzone-885", "segmentation_id": 885, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4583380-52", "ovs_interfaceid": "f4583380-5208-4372-ab67-cc6b64a287d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.986214] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619314, 'name': Rename_Task, 'duration_secs': 0.159556} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.986718] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.986952] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f9684b0-a5fd-4a68-8971-091c896d9125 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.992369] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1300.992369] env[70020]: value = "task-3619315" [ 1300.992369] env[70020]: _type = "Task" [ 1300.992369] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.999368] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.031909] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.032193] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.032430] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.032586] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.032749] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 
tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.034821] env[70020]: INFO nova.compute.manager [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Terminating instance [ 1301.314168] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.415290] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Releasing lock "refresh_cache-845ea37a-9945-49cd-a1bd-3da91f4af16b" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.416446] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4210095e-aa2b-46df-a14c-a80501fe6ad7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.423167] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Resuming the VM {{(pid=70020) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1301.423399] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a77f3c02-e961-4b0c-b5a7-1aa65fdb2a56 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.428804] env[70020]: DEBUG oslo_vmware.api [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1301.428804] env[70020]: value = "task-3619316" [ 1301.428804] env[70020]: _type = "Task" [ 1301.428804] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.436578] env[70020]: DEBUG oslo_vmware.api [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619316, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.501211] env[70020]: DEBUG oslo_vmware.api [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619315, 'name': PowerOnVM_Task, 'duration_secs': 0.422914} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.501528] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.538501] env[70020]: DEBUG nova.compute.manager [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1301.538805] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1301.539113] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f01e3e16-3726-44f4-9b11-640b098e0d0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.546584] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1301.546584] env[70020]: value = "task-3619317" [ 1301.546584] env[70020]: _type = "Task" [ 1301.546584] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.554529] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.604851] env[70020]: DEBUG nova.compute.manager [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1301.605885] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a999112-79f9-42f4-a8d2-aec25c772c8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.813601] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.938743] env[70020]: DEBUG oslo_vmware.api [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619316, 'name': PowerOnVM_Task} progress is 79%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.057715] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619317, 'name': PowerOffVM_Task, 'duration_secs': 0.221023} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.058059] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1302.058284] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Volume detach. Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1302.058479] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721830', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'name': 'volume-019bffbe-24bf-4b30-80fe-f387c8bba21b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'd28f6dff-8f9f-41d4-87ae-0ff87327d042', 'attached_at': '2025-04-25T23:12:05.000000', 'detached_at': '', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'serial': '019bffbe-24bf-4b30-80fe-f387c8bba21b'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1302.059352] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b898924-435d-42be-92fb-a93fbafe83bc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.078317] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c8e9c9-7284-4510-b604-3921a61ff85c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.085631] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fe30da-e645-4953-a068-7b769c744c2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.104454] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94454165-30dd-4aad-9cc7-1de86a3f64d8 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.123960] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] The volume has not been displaced from its original location: [datastore1] volume-019bffbe-24bf-4b30-80fe-f387c8bba21b/volume-019bffbe-24bf-4b30-80fe-f387c8bba21b.vmdk. No consolidation needed. {{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1302.129441] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1302.131454] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1062c10c-4ee4-4073-b57d-a7ad77dc5e10 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.144565] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e0125010-8d9d-410f-beca-0207855bafe0 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.164s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.151211] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1302.151211] env[70020]: value = "task-3619318" [ 1302.151211] env[70020]: _type = "Task" [ 1302.151211] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.161744] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619318, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.314645] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.438914] env[70020]: DEBUG oslo_vmware.api [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619316, 'name': PowerOnVM_Task, 'duration_secs': 0.785891} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.439317] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Resumed the VM {{(pid=70020) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1302.439498] env[70020]: DEBUG nova.compute.manager [None req-7a955677-57a6-4c71-b26d-c17a873b180b tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1302.440378] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1309ded0-a3f6-4c15-898f-914f991c2454 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.661351] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619318, 'name': ReconfigVM_Task, 'duration_secs': 0.355752} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.661694] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1302.666449] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1fb5db0-d043-4814-a064-57e4c3797ddd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.681672] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1302.681672] env[70020]: value = "task-3619319" [ 1302.681672] env[70020]: _type = "Task" [ 1302.681672] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.689833] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.815171] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.191862] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619319, 'name': ReconfigVM_Task, 'duration_secs': 0.144354} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.192169] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721830', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'name': 'volume-019bffbe-24bf-4b30-80fe-f387c8bba21b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'd28f6dff-8f9f-41d4-87ae-0ff87327d042', 'attached_at': '2025-04-25T23:12:05.000000', 'detached_at': '', 'volume_id': '019bffbe-24bf-4b30-80fe-f387c8bba21b', 'serial': '019bffbe-24bf-4b30-80fe-f387c8bba21b'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1303.192436] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1303.193185] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcba1ec-f308-4e9b-a791-d7ee9a293082 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.199397] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1303.199617] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49ddea4f-d127-4b57-9375-60a7dee97174 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.261172] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1303.261355] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1303.261537] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 
tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore1] d28f6dff-8f9f-41d4-87ae-0ff87327d042 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1303.261800] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c930e48-04b2-49d9-a01d-9b56d1e37e82 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.268089] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1303.268089] env[70020]: value = "task-3619321" [ 1303.268089] env[70020]: _type = "Task" [ 1303.268089] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.275289] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.314677] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.380329] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.380568] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.380775] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "845ea37a-9945-49cd-a1bd-3da91f4af16b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.380951] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.381130] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.383203] env[70020]: INFO nova.compute.manager [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Terminating instance [ 1303.777759] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.819012] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task} progress is 18%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.887038] env[70020]: DEBUG nova.compute.manager [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1303.887173] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1303.888106] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9a62e2-fb29-4bbc-be38-d48cbf900d08 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.895823] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1303.896073] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5587965f-9f3e-4468-b6ba-13f83e1469c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.901748] env[70020]: DEBUG oslo_vmware.api [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1303.901748] env[70020]: value = "task-3619322" [ 1303.901748] env[70020]: _type = "Task" [ 1303.901748] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.910409] env[70020]: DEBUG oslo_vmware.api [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.278923] env[70020]: DEBUG oslo_vmware.api [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.710303} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.279176] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1304.279366] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1304.279543] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1304.279734] env[70020]: INFO nova.compute.manager [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Took 2.74 seconds to destroy the instance on the hypervisor. [ 1304.279992] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.280203] env[70020]: DEBUG nova.compute.manager [-] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1304.280299] env[70020]: DEBUG nova.network.neutron [-] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1304.318397] env[70020]: DEBUG oslo_vmware.api [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619310, 'name': ReconfigVM_Task, 'duration_secs': 5.774615} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.318633] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.318834] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Reconfigured VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1304.410703] env[70020]: DEBUG oslo_vmware.api [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619322, 'name': PowerOffVM_Task, 'duration_secs': 0.283424} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.412035] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1304.412035] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1304.412035] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0878e9b4-e9fd-4615-a1c3-5a652540bf19 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.478683] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1304.478967] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1304.479094] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleting the datastore file [datastore1] 845ea37a-9945-49cd-a1bd-3da91f4af16b {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1304.479540] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46096cfc-d74b-4e5f-a57c-d87f7d3b409f {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.485449] env[70020]: DEBUG oslo_vmware.api [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for the task: (returnval){ [ 1304.485449] env[70020]: value = "task-3619324" [ 1304.485449] env[70020]: _type = "Task" [ 1304.485449] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.493597] env[70020]: DEBUG oslo_vmware.api [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619324, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.775909] env[70020]: DEBUG nova.compute.manager [req-21da8bfc-deaf-4eba-b687-2c517e796c94 req-16c1cb32-667d-4d80-b509-0ccbd0558369 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Received event network-vif-deleted-c963937e-c9c9-452b-a0d2-b2a4314681dd {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.775909] env[70020]: INFO nova.compute.manager [req-21da8bfc-deaf-4eba-b687-2c517e796c94 req-16c1cb32-667d-4d80-b509-0ccbd0558369 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Neutron deleted interface c963937e-c9c9-452b-a0d2-b2a4314681dd; detaching it from the instance and deleting it from the info cache [ 1304.775909] env[70020]: DEBUG nova.network.neutron [req-21da8bfc-deaf-4eba-b687-2c517e796c94 req-16c1cb32-667d-4d80-b509-0ccbd0558369 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.995528] env[70020]: DEBUG oslo_vmware.api [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Task: {'id': task-3619324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140173} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.995796] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1304.996406] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1304.996406] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1304.996406] env[70020]: INFO nova.compute.manager [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1304.996585] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.996701] env[70020]: DEBUG nova.compute.manager [-] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1304.996795] env[70020]: DEBUG nova.network.neutron [-] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1305.260916] env[70020]: DEBUG nova.network.neutron [-] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.278403] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcea10ca-4f0f-4a4f-a4cf-69deecef0590 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.288739] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3546fbef-f65a-4e03-a427-582d01f04f33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.317677] env[70020]: DEBUG nova.compute.manager [req-21da8bfc-deaf-4eba-b687-2c517e796c94 req-16c1cb32-667d-4d80-b509-0ccbd0558369 service nova] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Detach interface failed, port_id=c963937e-c9c9-452b-a0d2-b2a4314681dd, reason: Instance d28f6dff-8f9f-41d4-87ae-0ff87327d042 could not be found. 
{{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1305.584369] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.584559] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.584740] env[70020]: DEBUG nova.network.neutron [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.765077] env[70020]: INFO nova.compute.manager [-] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Took 1.48 seconds to deallocate network for instance. [ 1306.223393] env[70020]: DEBUG nova.network.neutron [-] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.275085] env[70020]: INFO nova.network.neutron [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Port b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1306.275474] env[70020]: DEBUG nova.network.neutron [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.308328] env[70020]: INFO nova.compute.manager [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1306.310980] env[70020]: DEBUG nova.compute.manager [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Deleting volume: 019bffbe-24bf-4b30-80fe-f387c8bba21b {{(pid=70020) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1306.389368] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-4b73ae75-c403-4268-8eab-4d6c32aef950-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.389368] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-4b73ae75-c403-4268-8eab-4d6c32aef950-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.389368] env[70020]: DEBUG nova.objects.instance [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'flavor' on Instance uuid 4b73ae75-c403-4268-8eab-4d6c32aef950 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1306.727097] env[70020]: INFO nova.compute.manager [-] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Took 1.73 seconds to deallocate network for instance. [ 1306.778293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.814127] env[70020]: DEBUG nova.compute.manager [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1306.814322] env[70020]: DEBUG nova.compute.manager [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing instance network info cache due to event network-changed-0889717d-3194-4204-a46b-57e94fc35d6c. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1306.814533] env[70020]: DEBUG oslo_concurrency.lockutils [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] Acquiring lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.814701] env[70020]: DEBUG oslo_concurrency.lockutils [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] Acquired lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.814822] env[70020]: DEBUG nova.network.neutron [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Refreshing network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.852052] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.852348] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.852540] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.873446] env[70020]: INFO nova.scheduler.client.report [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted allocations for instance d28f6dff-8f9f-41d4-87ae-0ff87327d042 [ 1306.982150] env[70020]: DEBUG nova.objects.instance [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'pci_requests' on Instance uuid 4b73ae75-c403-4268-8eab-4d6c32aef950 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1307.233610] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.233885] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 
tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.234109] env[70020]: DEBUG nova.objects.instance [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lazy-loading 'resources' on Instance uuid 845ea37a-9945-49cd-a1bd-3da91f4af16b {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1307.282124] env[70020]: DEBUG oslo_concurrency.lockutils [None req-9b9d3f39-9eff-4ddb-83ac-01f52041b625 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-e96aae17-5ae5-404b-bbe3-46777f7c34d2-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.552s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.382428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0957d4e0-e68a-458b-b347-5e4843a8e9a6 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "d28f6dff-8f9f-41d4-87ae-0ff87327d042" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.350s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.484759] env[70020]: DEBUG nova.objects.base [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Object Instance<4b73ae75-c403-4268-8eab-4d6c32aef950> lazy-loaded attributes: flavor,pci_requests {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1307.484973] env[70020]: DEBUG nova.network.neutron [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.509797] env[70020]: DEBUG nova.network.neutron [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updated VIF entry in instance network info cache for port 0889717d-3194-4204-a46b-57e94fc35d6c. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.510179] env[70020]: DEBUG nova.network.neutron [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [{"id": "0889717d-3194-4204-a46b-57e94fc35d6c", "address": "fa:16:3e:ed:64:03", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0889717d-31", "ovs_interfaceid": "0889717d-3194-4204-a46b-57e94fc35d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.547591] env[70020]: DEBUG nova.policy [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '914fc4078a214da891e7d12d242504cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0573da12f56f4b18a103e4e9fdfb9c19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1307.702043] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.702326] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.702534] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.702719] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.702884] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.705486] env[70020]: INFO nova.compute.manager [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Terminating instance [ 1307.821197] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff750e1-33a1-4a87-aa80-2f5ca8262bc0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.828637] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7fddc9-f9b6-41ad-aa15-65e02b740ac1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.857514] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86626051-d8d5-4f26-b735-04a1a694f80e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.864131] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7160e1-55a0-493d-9ac8-4809335d9927 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.877820] env[70020]: DEBUG nova.compute.provider_tree [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.014117] env[70020]: DEBUG oslo_concurrency.lockutils [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] Releasing lock "refresh_cache-e96aae17-5ae5-404b-bbe3-46777f7c34d2" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.014362] env[70020]: DEBUG nova.compute.manager [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1308.014584] env[70020]: DEBUG nova.compute.manager [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing instance network info cache due to event network-changed-8b1a9cab-123c-45b9-9703-a6e46606f140. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1308.014773] env[70020]: DEBUG oslo_concurrency.lockutils [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.014913] env[70020]: DEBUG oslo_concurrency.lockutils [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1308.015088] env[70020]: DEBUG nova.network.neutron [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.210069] env[70020]: DEBUG nova.compute.manager [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1308.210326] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1308.211250] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29272c7-68e5-4e1e-bbad-9208ff55beae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.219279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1308.219510] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88966238-f3c2-4188-9cf7-a7462d8203b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.226256] env[70020]: DEBUG oslo_vmware.api [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1308.226256] env[70020]: value = "task-3619326" [ 1308.226256] env[70020]: _type = "Task" [ 1308.226256] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.233803] env[70020]: DEBUG oslo_vmware.api [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619326, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.381536] env[70020]: DEBUG nova.scheduler.client.report [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1308.717330] env[70020]: DEBUG nova.network.neutron [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updated VIF entry in instance network info cache for port 8b1a9cab-123c-45b9-9703-a6e46606f140. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1308.717843] env[70020]: DEBUG nova.network.neutron [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.736042] env[70020]: DEBUG oslo_vmware.api [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619326, 'name': PowerOffVM_Task, 'duration_secs': 0.244638} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.736279] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1308.736446] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1308.736711] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c060418d-f925-4233-b627-0f1df33ad66d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.799338] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1308.799561] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1308.799778] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore1] c9a3fb0f-95bf-4b51-ac06-99415acfa9cb {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1308.800018] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4923863-7243-43a2-9400-2131ad243c3c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.806669] env[70020]: DEBUG oslo_vmware.api [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1308.806669] env[70020]: value = "task-3619328" [ 1308.806669] env[70020]: _type = "Task" [ 1308.806669] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.814510] env[70020]: DEBUG oslo_vmware.api [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619328, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.887054] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.653s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.902216] env[70020]: DEBUG nova.compute.manager [req-a2a56704-a061-4da9-ae2e-059b615581f0 req-6087e0ea-9fd6-47e4-8047-6a53dfedeb0b service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-vif-plugged-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1308.902436] env[70020]: DEBUG oslo_concurrency.lockutils [req-a2a56704-a061-4da9-ae2e-059b615581f0 req-6087e0ea-9fd6-47e4-8047-6a53dfedeb0b service nova] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1308.902641] env[70020]: DEBUG oslo_concurrency.lockutils [req-a2a56704-a061-4da9-ae2e-059b615581f0 req-6087e0ea-9fd6-47e4-8047-6a53dfedeb0b service nova] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1308.902803] env[70020]: DEBUG oslo_concurrency.lockutils [req-a2a56704-a061-4da9-ae2e-059b615581f0 req-6087e0ea-9fd6-47e4-8047-6a53dfedeb0b service nova] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.902963] env[70020]: DEBUG nova.compute.manager [req-a2a56704-a061-4da9-ae2e-059b615581f0 req-6087e0ea-9fd6-47e4-8047-6a53dfedeb0b service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] No waiting events found dispatching network-vif-plugged-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1308.904187] env[70020]: WARNING nova.compute.manager [req-a2a56704-a061-4da9-ae2e-059b615581f0 req-6087e0ea-9fd6-47e4-8047-6a53dfedeb0b service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received unexpected event network-vif-plugged-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae for instance with vm_state active and task_state None. 
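
The PowerOffVM_Task and DeleteDatastoreFile_Task records above are produced by oslo.vmware's task-wait loop: the driver invokes an asynchronous vSphere call, gets back a Task moref, and wait_for_task polls it, emitting the "_poll_task ... progress is N%" lines. A minimal sketch of that pattern follows; the vCenter host, credentials, and VM moref value are placeholders for illustration and are not taken from this log.

    # Sketch of the oslo.vmware invoke_api / wait_for_task pattern seen above.
    from oslo_vmware import api, vim_util

    # Host, user, password are placeholders; 10 = api_retry_count,
    # 0.5 = task_poll_interval (seconds between "progress is N%" polls).
    session = api.VMwareAPISession(
        'vc.example.test',
        'administrator@vsphere.local',
        'secret',
        10,
        0.5)

    # Build a managed object reference for an existing VM (value is a placeholder).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # The asynchronous vSphere call returns a Task moref immediately.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # Blocks until the task reaches success/error; oslo.vmware polls it and
    # logs the "Task: {...} progress is N%" and "completed successfully" lines.
    session.wait_for_task(task)
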
[ 1308.914425] env[70020]: INFO nova.scheduler.client.report [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Deleted allocations for instance 845ea37a-9945-49cd-a1bd-3da91f4af16b [ 1308.992159] env[70020]: DEBUG nova.network.neutron [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Successfully updated port: b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.221147] env[70020]: DEBUG oslo_concurrency.lockutils [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1309.221454] env[70020]: DEBUG nova.compute.manager [req-304111b2-d0b8-41ad-b713-27ca09508d6a req-986fbeb6-7692-4a85-ae10-9d1cb58eb8af service nova] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Received event network-vif-deleted-f4583380-5208-4372-ab67-cc6b64a287d2 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1309.317254] env[70020]: DEBUG oslo_vmware.api [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138661} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.317716] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1309.317716] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1309.317902] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1309.318081] env[70020]: INFO nova.compute.manager [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1309.318315] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1309.318533] env[70020]: DEBUG nova.compute.manager [-] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1309.318637] env[70020]: DEBUG nova.network.neutron [-] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1309.421966] env[70020]: DEBUG oslo_concurrency.lockutils [None req-78d23a08-5feb-4580-bdf2-17e521728f78 tempest-ServerActionsTestJSON-1661939768 tempest-ServerActionsTestJSON-1661939768-project-member] Lock "845ea37a-9945-49cd-a1bd-3da91f4af16b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.041s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.499293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.499293] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.499293] env[70020]: DEBUG nova.network.neutron [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1310.047328] env[70020]: WARNING nova.network.neutron [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] 95372772-c776-417b-938a-f27c0d43d6ec already exists in list: networks containing: ['95372772-c776-417b-938a-f27c0d43d6ec']. ignoring it [ 1310.049577] env[70020]: DEBUG nova.network.neutron [-] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.551938] env[70020]: INFO nova.compute.manager [-] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Took 1.23 seconds to deallocate network for instance. 
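
The recurring lock records in this trace come from oslo.concurrency's lockutils in two forms: the decorator form logs the 'acquired by "..." :: waited' / '"released" by "..." :: held' pairs (lockutils.py:405/410/424), while the context-manager form logs the bare Acquiring/Acquired/Releasing lines around the network-info cache refreshes (lockutils.py:313/316/334). A minimal sketch of both forms follows; the function bodies and names are illustrative, not Nova's actual code.

    # Sketch of the two oslo.concurrency locking patterns behind these records.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Decorator form: logs 'Lock "compute_resources" acquired by "...update_usage"
        # :: waited Ns' on entry and a matching '"released" ... :: held Ns' on return.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form: logs the Acquiring / Acquired / Releasing triplet
        # for a per-instance lock name such as "refresh_cache-<uuid>".
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass
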
[ 1310.671199] env[70020]: DEBUG nova.network.neutron [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "address": "fa:16:3e:fa:67:6b", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72ae1ea-1f", "ovs_interfaceid": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.934859] env[70020]: DEBUG nova.compute.manager [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-changed-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1310.935034] env[70020]: DEBUG nova.compute.manager [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing instance network info cache due to event network-changed-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1310.935234] env[70020]: DEBUG oslo_concurrency.lockutils [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.059845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.059845] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.059845] env[70020]: DEBUG nova.objects.instance [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'resources' on Instance uuid c9a3fb0f-95bf-4b51-ac06-99415acfa9cb {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.175112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.175112] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.175255] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.175554] env[70020]: DEBUG oslo_concurrency.lockutils [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.175730] env[70020]: DEBUG nova.network.neutron [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Refreshing network info cache for port b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.177509] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-26ea7f3d-384d-4417-943c-8fdc697d0d91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.194951] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1311.195204] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.195357] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1311.195533] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.195677] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1311.195818] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1311.196019] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1311.196177] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1311.196337] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1311.196494] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1311.196660] env[70020]: DEBUG nova.virt.hardware [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1311.203041] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Reconfiguring VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1311.203627] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-815e9ed0-7239-455c-b84f-17bd2a82f299 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.221760] env[70020]: DEBUG oslo_vmware.api [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1311.221760] env[70020]: value = "task-3619329" [ 1311.221760] env[70020]: _type = "Task" [ 1311.221760] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.230275] env[70020]: DEBUG oslo_vmware.api [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619329, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.660339] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9111e755-67c2-4e58-bfe9-cc9b4d03d502 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.668211] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be8036f-16b4-4144-8340-d66a1c9d4a6f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.702222] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14964c49-74c4-4f6a-bef0-01b1fdffd242 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.710405] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1d7d64-d6ca-45d1-ae26-2fade13073fc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.723866] env[70020]: DEBUG nova.compute.provider_tree [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.735804] env[70020]: DEBUG oslo_vmware.api [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619329, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.908360] env[70020]: DEBUG nova.network.neutron [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updated VIF entry in instance network info cache for port b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.908828] env[70020]: DEBUG nova.network.neutron [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "address": "fa:16:3e:fa:67:6b", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72ae1ea-1f", "ovs_interfaceid": "b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.233452] env[70020]: DEBUG nova.scheduler.client.report [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.236566] env[70020]: DEBUG oslo_vmware.api [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619329, 'name': ReconfigVM_Task, 'duration_secs': 0.624823} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.237246] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.237479] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Reconfigured VM to attach interface {{(pid=70020) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1312.411901] env[70020]: DEBUG oslo_concurrency.lockutils [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.412195] env[70020]: DEBUG nova.compute.manager [req-d5a143ae-aac0-49e2-94f7-d3af3539d134 req-d4b83d7f-db17-42ea-810a-90ffc8064f46 service nova] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Received event network-vif-deleted-9912a098-b09d-4c69-819f-47a4d7da500b {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1312.737932] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.741067] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f2affe58-a2b3-494b-976f-f629b956740f tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-4b73ae75-c403-4268-8eab-4d6c32aef950-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.353s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.761913] env[70020]: INFO nova.scheduler.client.report [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted allocations for instance c9a3fb0f-95bf-4b51-ac06-99415acfa9cb [ 1313.270654] env[70020]: DEBUG oslo_concurrency.lockutils [None req-2ddb6b64-8aad-46c3-8742-54cb9392b419 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "c9a3fb0f-95bf-4b51-ac06-99415acfa9cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.567s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.145180] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "interface-4b73ae75-c403-4268-8eab-4d6c32aef950-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.145180] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-4b73ae75-c403-4268-8eab-4d6c32aef950-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.648719] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.648990] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.651014] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd490bd4-f8ae-4b5d-9a20-dae3166f109d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.668378] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cf89a6-f9c2-4de0-838d-73408c7703f9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.695295] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Reconfiguring VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1314.695606] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-052be69b-a28c-42e4-9614-a250feff5b4b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.715046] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1314.715046] env[70020]: value = "task-3619331" [ 1314.715046] env[70020]: _type = "Task" [ 1314.715046] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.722348] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.224404] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.625307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.625523] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.724988] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.128280] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1316.225717] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.648564] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.648866] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.650373] env[70020]: INFO nova.compute.claims [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1316.727190] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.227312] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.461102] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.461374] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.461580] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.461757] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.461921] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.463976] env[70020]: INFO nova.compute.manager [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Terminating instance [ 1317.724015] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbc4fc2-fa4d-4f23-8450-75cec944b882 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.729568] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.734212] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b76cb1-8f07-4159-a68c-0b0b7a322ad8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.764244] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd674132-0fee-4375-a6e4-ad6dc91ee424 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.770938] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba019f49-3097-445b-9107-d126d660b757 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.784082] env[70020]: DEBUG nova.compute.provider_tree [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.967955] env[70020]: DEBUG nova.compute.manager [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1317.968206] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1317.969090] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e20dc8-4b10-44df-8f20-861a0202fc07 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.977076] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1317.977295] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18f97936-76fa-4519-8644-73fe8f992b6f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.983522] env[70020]: DEBUG oslo_vmware.api [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1317.983522] env[70020]: value = "task-3619332" [ 1317.983522] env[70020]: _type = "Task" [ 1317.983522] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.990866] env[70020]: DEBUG oslo_vmware.api [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.228181] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.287777] env[70020]: DEBUG nova.scheduler.client.report [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1318.493207] env[70020]: DEBUG oslo_vmware.api [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619332, 'name': PowerOffVM_Task, 'duration_secs': 0.206465} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.493474] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1318.493643] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1318.493892] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d659b2b1-d550-4856-9efe-99baecc45ed8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.555041] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1318.555297] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1318.555461] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleting the datastore file [datastore2] 96966bf2-a9ff-48ba-be3f-c767e7b6eedd {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1318.555727] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a25cea8-53e9-4e52-ae01-f34360705a1a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.561783] env[70020]: DEBUG oslo_vmware.api [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for the task: (returnval){ [ 1318.561783] env[70020]: value = "task-3619334" [ 1318.561783] env[70020]: _type = "Task" [ 1318.561783] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.569216] env[70020]: DEBUG oslo_vmware.api [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619334, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.729681] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.792572] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.793104] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1319.071830] env[70020]: DEBUG oslo_vmware.api [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Task: {'id': task-3619334, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160717} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.072096] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1319.072284] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1319.072461] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1319.072643] env[70020]: INFO nova.compute.manager [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1319.072878] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1319.073086] env[70020]: DEBUG nova.compute.manager [-] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1319.073183] env[70020]: DEBUG nova.network.neutron [-] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1319.232026] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.298593] env[70020]: DEBUG nova.compute.utils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1319.299997] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1319.300180] env[70020]: DEBUG nova.network.neutron [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1319.346135] env[70020]: DEBUG nova.policy [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274a4150c13f4ec0b34194f12b995f25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b34ec8c1ad864be694a6f9ce2b8a7788', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1319.367884] env[70020]: DEBUG nova.compute.manager [req-a6540bd9-e6c8-4216-b299-43832dc7e7de req-ff12211a-c80a-4490-83d3-4341883655c4 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Received event network-vif-deleted-a59ccbd4-85b3-4a98-8407-29d65fea21f5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1319.368088] env[70020]: INFO nova.compute.manager [req-a6540bd9-e6c8-4216-b299-43832dc7e7de req-ff12211a-c80a-4490-83d3-4341883655c4 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Neutron deleted interface a59ccbd4-85b3-4a98-8407-29d65fea21f5; detaching it from the instance and deleting it from the info cache [ 1319.368221] env[70020]: DEBUG nova.network.neutron [req-a6540bd9-e6c8-4216-b299-43832dc7e7de req-ff12211a-c80a-4490-83d3-4341883655c4 service 
nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.585663] env[70020]: DEBUG nova.network.neutron [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Successfully created port: c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1319.730737] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.803594] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Start building block device mappings for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1319.850058] env[70020]: DEBUG nova.network.neutron [-] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.870400] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cec70e51-96d1-439e-88ef-ce6fdb7f354f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.880668] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12edf3e7-7fc8-4c32-8d58-b89d25b7a3ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.906531] env[70020]: DEBUG nova.compute.manager [req-a6540bd9-e6c8-4216-b299-43832dc7e7de req-ff12211a-c80a-4490-83d3-4341883655c4 service nova] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Detach interface failed, port_id=a59ccbd4-85b3-4a98-8407-29d65fea21f5, reason: Instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1320.232237] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task} progress is 18%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.352207] env[70020]: INFO nova.compute.manager [-] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Took 1.28 seconds to deallocate network for instance. [ 1320.732370] env[70020]: DEBUG oslo_vmware.api [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619331, 'name': ReconfigVM_Task, 'duration_secs': 5.74819} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.735047] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.735047] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Reconfigured VM to detach interface {{(pid=70020) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1320.813148] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1320.837803] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1320.838070] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1320.838230] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1320.838413] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1320.838558] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1320.838701] 
env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1320.838908] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1320.839090] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1320.839261] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1320.839420] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1320.839589] env[70020]: DEBUG nova.virt.hardware [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1320.840456] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a151dc-d464-400d-aba3-1597fb415bfe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.848364] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f12715-21bd-41b3-ba72-ca4c5e240cc8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.863207] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.863389] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.863604] env[70020]: DEBUG 
nova.objects.instance [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lazy-loading 'resources' on Instance uuid 96966bf2-a9ff-48ba-be3f-c767e7b6eedd {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1320.938371] env[70020]: DEBUG nova.compute.manager [req-9b89c3f5-ea2b-4738-8294-8872986a6152 req-e8aabf43-1815-4869-b111-4611ab5bc8cd service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Received event network-vif-plugged-c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1320.938592] env[70020]: DEBUG oslo_concurrency.lockutils [req-9b89c3f5-ea2b-4738-8294-8872986a6152 req-e8aabf43-1815-4869-b111-4611ab5bc8cd service nova] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.938801] env[70020]: DEBUG oslo_concurrency.lockutils [req-9b89c3f5-ea2b-4738-8294-8872986a6152 req-e8aabf43-1815-4869-b111-4611ab5bc8cd service nova] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.938969] env[70020]: DEBUG oslo_concurrency.lockutils [req-9b89c3f5-ea2b-4738-8294-8872986a6152 req-e8aabf43-1815-4869-b111-4611ab5bc8cd service nova] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.939165] env[70020]: DEBUG nova.compute.manager [req-9b89c3f5-ea2b-4738-8294-8872986a6152 req-e8aabf43-1815-4869-b111-4611ab5bc8cd service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] No waiting events found dispatching network-vif-plugged-c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1320.941019] env[70020]: WARNING nova.compute.manager [req-9b89c3f5-ea2b-4738-8294-8872986a6152 req-e8aabf43-1815-4869-b111-4611ab5bc8cd service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Received unexpected event network-vif-plugged-c8b16039-3297-434c-ae9e-e75e6ee2965d for instance with vm_state building and task_state spawning. 
[ 1321.049843] env[70020]: DEBUG nova.network.neutron [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Successfully updated port: c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1321.440197] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da72b56e-8a9b-4e05-b33a-84be6b027c45 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.447689] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b47f2d-2018-443d-84db-b5f55dcb27de {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.477522] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64b9a8a-1c1d-44e0-b679-986928ed1f3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.484749] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4732f7a-635d-4056-82bf-0482a16989c0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.498727] env[70020]: DEBUG nova.compute.provider_tree [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.552763] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.552891] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.553048] env[70020]: DEBUG nova.network.neutron [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.001888] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.002096] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquired lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1322.002284] env[70020]: DEBUG nova.network.neutron [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.004045] env[70020]: DEBUG nova.scheduler.client.report [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.085533] env[70020]: DEBUG nova.network.neutron [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1322.241209] env[70020]: DEBUG nova.network.neutron [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.510497] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.529529] env[70020]: INFO nova.scheduler.client.report [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Deleted allocations for instance 96966bf2-a9ff-48ba-be3f-c767e7b6eedd [ 1322.566943] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.567275] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.567714] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.567930] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.568184] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.570234] env[70020]: INFO nova.compute.manager [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Terminating instance [ 1322.738057] env[70020]: INFO nova.network.neutron [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Port b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1322.738407] env[70020]: DEBUG nova.network.neutron [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [{"id": "8b1a9cab-123c-45b9-9703-a6e46606f140", "address": "fa:16:3e:0b:81:cc", "network": {"id": "95372772-c776-417b-938a-f27c0d43d6ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1386008869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0573da12f56f4b18a103e4e9fdfb9c19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1a9cab-12", "ovs_interfaceid": "8b1a9cab-123c-45b9-9703-a6e46606f140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.743301] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1322.743554] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Instance network_info: |[{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1322.743903] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:c2:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8b16039-3297-434c-ae9e-e75e6ee2965d', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1322.751245] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating folder: Project (b34ec8c1ad864be694a6f9ce2b8a7788). Parent ref: group-v721521. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1322.752020] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e8e64b5-6e30-453f-8ec8-944da8d6c649 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.764347] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created folder: Project (b34ec8c1ad864be694a6f9ce2b8a7788) in parent group-v721521. [ 1322.764521] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating folder: Instances. Parent ref: group-v721847. {{(pid=70020) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1322.764732] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c2f14b7-74e9-4ae5-bc8f-e1083b76a6ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.773124] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created folder: Instances in parent group-v721847. [ 1322.773325] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1322.773493] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1322.773671] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-789aeba7-739f-44d4-9dd2-f98d95f17179 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.791526] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1322.791526] env[70020]: value = "task-3619337" [ 1322.791526] env[70020]: _type = "Task" [ 1322.791526] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.798414] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619337, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.968560] env[70020]: DEBUG nova.compute.manager [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Received event network-changed-c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1322.968752] env[70020]: DEBUG nova.compute.manager [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Refreshing instance network info cache due to event network-changed-c8b16039-3297-434c-ae9e-e75e6ee2965d. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1322.968989] env[70020]: DEBUG oslo_concurrency.lockutils [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.969171] env[70020]: DEBUG oslo_concurrency.lockutils [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1322.969333] env[70020]: DEBUG nova.network.neutron [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Refreshing network info cache for port c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.037019] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d7e49be6-ed9b-4918-b322-79e7fb5c0d89 tempest-ServerActionsTestOtherA-108106573 tempest-ServerActionsTestOtherA-108106573-project-member] Lock "96966bf2-a9ff-48ba-be3f-c767e7b6eedd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.576s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.075989] env[70020]: DEBUG nova.compute.manager [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Start destroying the instance on the hypervisor. 
{{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1323.076235] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.077386] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464cdf0a-0943-4393-a33e-ac785d6624d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.084995] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.085243] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3bb8687e-f4f1-4188-aac8-2cee0d3b02fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.091070] env[70020]: DEBUG oslo_vmware.api [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1323.091070] env[70020]: value = "task-3619338" [ 1323.091070] env[70020]: _type = "Task" [ 1323.091070] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.098861] env[70020]: DEBUG oslo_vmware.api [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.241222] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Releasing lock "refresh_cache-4b73ae75-c403-4268-8eab-4d6c32aef950" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.302914] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619337, 'name': CreateVM_Task, 'duration_secs': 0.376741} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.303147] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1323.303784] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.303948] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.304270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1323.304517] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61de22ac-df9e-4197-bf85-c85ffbd8bf8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.308729] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1323.308729] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525e7cdf-ee7d-0bbd-7cc4-f67b4c1dd218" [ 1323.308729] env[70020]: _type = "Task" [ 1323.308729] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.316481] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525e7cdf-ee7d-0bbd-7cc4-f67b4c1dd218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.600782] env[70020]: DEBUG oslo_vmware.api [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619338, 'name': PowerOffVM_Task, 'duration_secs': 0.166569} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.603435] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1323.603605] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1323.603865] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cb0d7c9-3f02-46f6-af29-2c3646bba356 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.665554] env[70020]: DEBUG nova.network.neutron [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updated VIF entry in instance network info cache for port c8b16039-3297-434c-ae9e-e75e6ee2965d. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1323.665554] env[70020]: DEBUG nova.network.neutron [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.668734] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1323.669042] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 
tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1323.669333] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleting the datastore file [datastore2] 4b73ae75-c403-4268-8eab-4d6c32aef950 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1323.669978] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e9fc3d0-a44c-4b43-9785-9d00187a3d8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.677294] env[70020]: DEBUG oslo_vmware.api [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1323.677294] env[70020]: value = "task-3619340" [ 1323.677294] env[70020]: _type = "Task" [ 1323.677294] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.685426] env[70020]: DEBUG oslo_vmware.api [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.744953] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e662513a-6dda-4843-bb41-f478d53362a1 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "interface-4b73ae75-c403-4268-8eab-4d6c32aef950-b72ae1ea-1ffa-4b9c-a6eb-103a9e5ed0ae" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.600s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.819423] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525e7cdf-ee7d-0bbd-7cc4-f67b4c1dd218, 'name': SearchDatastore_Task, 'duration_secs': 0.012175} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.819720] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.819938] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1323.820179] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.820327] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.821035] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1323.821035] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9abb431d-e913-4ef5-97e9-6a27cada8fd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.829706] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1323.829706] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1323.830394] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-423bdf96-382e-44d6-8faf-1d36fa9e819d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.836172] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1323.836172] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b9505b-5f84-6a6b-7faf-b5c334fc44e7" [ 1323.836172] env[70020]: _type = "Task" [ 1323.836172] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.844389] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b9505b-5f84-6a6b-7faf-b5c334fc44e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.134924] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.171568] env[70020]: DEBUG oslo_concurrency.lockutils [req-7669fedb-03ff-4e17-93e5-b4a4ee263e82 req-5c6d0b45-bd45-428b-8f0e-7f669cc387b4 service nova] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.196224] env[70020]: DEBUG oslo_vmware.api [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161144} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.196700] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1324.196889] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1324.197110] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1324.197289] env[70020]: INFO nova.compute.manager [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1324.197563] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1324.197937] env[70020]: DEBUG nova.compute.manager [-] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1324.198035] env[70020]: DEBUG nova.network.neutron [-] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1324.348115] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52b9505b-5f84-6a6b-7faf-b5c334fc44e7, 'name': SearchDatastore_Task, 'duration_secs': 0.008981} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.349318] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc264169-8b3e-49df-9421-dcaab4f09834 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.355020] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1324.355020] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5279a3dd-d1b4-348e-e56e-d36780360de3" [ 1324.355020] env[70020]: _type = "Task" [ 1324.355020] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.363390] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5279a3dd-d1b4-348e-e56e-d36780360de3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.641113] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.641113] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.641113] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.641113] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1324.641113] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cf74a0-8a59-41db-ae3a-f05f3557cd31 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.654638] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7336b174-b01f-41b0-b7aa-9b2b95da646f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.671765] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f6a539-78c6-45c8-865e-8554b85d258e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.679546] 
env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a3b6bc-cff5-45e1-8a3a-b7f8afdb4714 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.718419] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180044MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1324.718586] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.719199] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.867325] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5279a3dd-d1b4-348e-e56e-d36780360de3, 'name': SearchDatastore_Task, 'duration_secs': 0.01026} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.867325] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.867325] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208/422aa98b-fa01-42c5-90cf-ed70e9781208.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1324.867325] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ecfdf9c5-6454-402a-b849-78dbb511bfaf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.874071] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1324.874071] env[70020]: value = "task-3619341" [ 1324.874071] env[70020]: _type = "Task" [ 1324.874071] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.888415] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619341, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.000071] env[70020]: DEBUG nova.compute.manager [req-161ec7e0-220d-4f2e-bde2-895176c49b52 req-f64344bc-48e3-44fa-bf1c-a22a78207940 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Received event network-vif-deleted-8b1a9cab-123c-45b9-9703-a6e46606f140 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1325.000254] env[70020]: INFO nova.compute.manager [req-161ec7e0-220d-4f2e-bde2-895176c49b52 req-f64344bc-48e3-44fa-bf1c-a22a78207940 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Neutron deleted interface 8b1a9cab-123c-45b9-9703-a6e46606f140; detaching it from the instance and deleting it from the info cache [ 1325.000408] env[70020]: DEBUG nova.network.neutron [req-161ec7e0-220d-4f2e-bde2-895176c49b52 req-f64344bc-48e3-44fa-bf1c-a22a78207940 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.384469] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619341, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.420338] env[70020]: DEBUG nova.network.neutron [-] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.504044] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-911b5eb9-70ea-470d-ab8f-54fccded6970 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.513050] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac9a56b-68c7-43aa-b10d-39e3acabba32 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.541426] env[70020]: DEBUG nova.compute.manager [req-161ec7e0-220d-4f2e-bde2-895176c49b52 req-f64344bc-48e3-44fa-bf1c-a22a78207940 service nova] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Detach interface failed, port_id=8b1a9cab-123c-45b9-9703-a6e46606f140, reason: Instance 4b73ae75-c403-4268-8eab-4d6c32aef950 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1325.750032] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance e96aae17-5ae5-404b-bbe3-46777f7c34d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.750032] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 4b73ae75-c403-4268-8eab-4d6c32aef950 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.750032] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.750032] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 422aa98b-fa01-42c5-90cf-ed70e9781208 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.750435] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1325.750435] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1325.803545] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71072fe0-a600-4198-9606-40e150747f93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.811903] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adf2f2e-a64d-4540-ad57-549499a4c875 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.841472] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaca2bed-83ad-4a68-a6f7-d39aebf349ed {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.848834] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71eb4f24-060c-4dd6-9900-59b749f7a8ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.861932] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.883225] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619341, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536039} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.883458] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208/422aa98b-fa01-42c5-90cf-ed70e9781208.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.883690] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.883916] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ed8a690-0fa0-4c71-8585-59da6894a017 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.890226] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1325.890226] env[70020]: value = "task-3619342" [ 1325.890226] env[70020]: _type = "Task" [ 1325.890226] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.896921] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.923078] env[70020]: INFO nova.compute.manager [-] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Took 1.72 seconds to deallocate network for instance. [ 1326.365047] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1326.400392] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087771} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.400667] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1326.401434] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe9d0b9-a859-4d5d-a135-886c5fe64ea4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.423323] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208/422aa98b-fa01-42c5-90cf-ed70e9781208.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.423506] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1980c4a7-7513-4238-ab3d-8178594b236e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.437576] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.442283] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1326.442283] env[70020]: value = "task-3619343" [ 1326.442283] env[70020]: _type = "Task" [ 1326.442283] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.449761] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619343, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.869869] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1326.869869] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.151s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.869869] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.432s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.870213] env[70020]: DEBUG nova.objects.instance [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'resources' on Instance uuid 4b73ae75-c403-4268-8eab-4d6c32aef950 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1326.952660] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619343, 'name': ReconfigVM_Task, 'duration_secs': 0.308988} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.952986] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208/422aa98b-fa01-42c5-90cf-ed70e9781208.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1326.953561] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4d8e9d2-9bfc-434c-82e3-5b9df1925381 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.960314] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1326.960314] env[70020]: value = "task-3619344" [ 1326.960314] env[70020]: _type = "Task" [ 1326.960314] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.967300] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619344, 'name': Rename_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.431835] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69638c03-be14-4e3d-93ff-ac6d2462c9ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.440291] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3d1ca9-87df-4b58-ba55-5c31637a3f75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.473550] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f949bc6f-7107-4209-a65c-b2eb4fbee01c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.484266] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe3ef5-da63-4e38-8e3b-28903798cae6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.487882] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619344, 'name': Rename_Task} progress is 14%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.497937] env[70020]: DEBUG nova.compute.provider_tree [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.866077] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.866239] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.866362] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.866516] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.866658] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.866803] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running 
periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.866939] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1327.979603] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619344, 'name': Rename_Task, 'duration_secs': 0.839634} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.979923] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.980118] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-352d53f9-eb5c-492e-ba7e-8e26cfb73c33 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.986040] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1327.986040] env[70020]: value = "task-3619345" [ 1327.986040] env[70020]: _type = "Task" [ 1327.986040] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.993282] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.001314] env[70020]: DEBUG nova.scheduler.client.report [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1328.495987] env[70020]: DEBUG oslo_vmware.api [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619345, 'name': PowerOnVM_Task, 'duration_secs': 0.475494} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.496211] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1328.496415] env[70020]: INFO nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 7.68 seconds to spawn the instance on the hypervisor. [ 1328.496593] env[70020]: DEBUG nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1328.497367] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300e7bd8-61bf-4097-a0a1-a4d12cd9ea3b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.505804] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.526117] env[70020]: INFO nova.scheduler.client.report [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted allocations for instance 4b73ae75-c403-4268-8eab-4d6c32aef950 [ 1329.015379] env[70020]: INFO nova.compute.manager [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 12.38 seconds to build instance. 
[ 1329.034686] env[70020]: DEBUG oslo_concurrency.lockutils [None req-498e3b68-1ac5-4c16-a1ca-31ab2c527638 tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "4b73ae75-c403-4268-8eab-4d6c32aef950" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.467s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.218270] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.218540] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.218799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.218987] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.219176] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.221312] env[70020]: INFO nova.compute.manager [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Terminating instance [ 1329.517612] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c8716c7f-7dd9-4500-804a-bc7602298f9d tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.892s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.725244] env[70020]: DEBUG nova.compute.manager [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1329.725492] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1329.726455] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529d04c3-4d35-4fcb-bee4-2a1c3a0adb83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.734589] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1329.734834] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0b5feef-eee6-4a76-a31c-5fd1c7709b68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.740982] env[70020]: DEBUG oslo_vmware.api [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1329.740982] env[70020]: value = "task-3619346" [ 1329.740982] env[70020]: _type = "Task" [ 1329.740982] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.749277] env[70020]: DEBUG oslo_vmware.api [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619346, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.765295] env[70020]: DEBUG nova.compute.manager [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Received event network-changed-c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1329.765382] env[70020]: DEBUG nova.compute.manager [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Refreshing instance network info cache due to event network-changed-c8b16039-3297-434c-ae9e-e75e6ee2965d. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1329.765562] env[70020]: DEBUG oslo_concurrency.lockutils [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.765712] env[70020]: DEBUG oslo_concurrency.lockutils [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.765869] env[70020]: DEBUG nova.network.neutron [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Refreshing network info cache for port c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1330.251056] env[70020]: DEBUG oslo_vmware.api [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619346, 'name': PowerOffVM_Task, 'duration_secs': 0.183528} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.251388] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1330.251487] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1330.251727] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de41b2ef-b39b-4feb-9520-83a322335482 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.313518] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1330.313781] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1330.314098] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleting the datastore file [datastore1] e96aae17-5ae5-404b-bbe3-46777f7c34d2 
{{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.314383] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1587d6cf-571b-4bb4-87c7-e27dbf8f0149 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.320622] env[70020]: DEBUG oslo_vmware.api [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for the task: (returnval){ [ 1330.320622] env[70020]: value = "task-3619348" [ 1330.320622] env[70020]: _type = "Task" [ 1330.320622] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.328679] env[70020]: DEBUG oslo_vmware.api [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619348, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.464693] env[70020]: DEBUG nova.network.neutron [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updated VIF entry in instance network info cache for port c8b16039-3297-434c-ae9e-e75e6ee2965d. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1330.465090] env[70020]: DEBUG nova.network.neutron [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.830055] env[70020]: DEBUG oslo_vmware.api [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Task: {'id': task-3619348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141755} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.830328] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1330.830518] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1330.830696] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1330.830865] env[70020]: INFO nova.compute.manager [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1330.831124] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1330.831322] env[70020]: DEBUG nova.compute.manager [-] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1330.831417] env[70020]: DEBUG nova.network.neutron [-] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1330.969352] env[70020]: DEBUG oslo_concurrency.lockutils [req-743bac4c-9896-4b66-8e62-34b5fd3383e5 req-b952e3cc-5fb7-4f88-a0d2-66916f16e891 service nova] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1331.709768] env[70020]: DEBUG nova.network.neutron [-] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.792021] env[70020]: DEBUG nova.compute.manager [req-58e29e1c-dada-465c-87e3-96cdd9f78fc1 req-4e6bf06e-2f5d-4c4b-8074-ad6b344aa2f2 service nova] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Received event network-vif-deleted-0889717d-3194-4204-a46b-57e94fc35d6c {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1332.213044] env[70020]: INFO nova.compute.manager [-] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Took 1.38 seconds to deallocate network for instance. 
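The DeleteDatastoreFile_Task records above show the recurring oslo.vmware pattern in this log: the driver submits a vCenter task, then blocks in wait_for_task while _poll_task reports progress until the task completes (here in 0.141755s). Below is a minimal, hedged sketch of that poll-until-complete loop; `fetch_task_info` is a hypothetical stand-in for a vCenter task lookup and the whole block illustrates the control flow only, it is not the oslo.vmware API.

```python
# Sketch of the poll-until-complete flow seen in the PowerOffVM_Task /
# DeleteDatastoreFile_Task records. `fetch_task_info` is a hypothetical
# callable returning e.g. {'state': 'running', 'progress': 40}; this is an
# illustration of the pattern, not the oslo.vmware implementation.
import time


class TaskFailed(Exception):
    """Polled task ended in an error state."""


def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=60.0):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info          # caller logs duration_secs and reads the result
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```

The same submit-and-poll shape recurs later in this section for ReconfigVM_Task (the volume detach) and CreateVM_Task (the new instance spawn); only the task name and duration differ.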
[ 1332.719162] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1332.719473] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1332.719690] env[70020]: DEBUG nova.objects.instance [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lazy-loading 'resources' on Instance uuid e96aae17-5ae5-404b-bbe3-46777f7c34d2 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.135359] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.275419] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c54fa6c-b237-4e44-b9ce-c253e629a516 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.284543] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7059a3-8232-44ae-9c75-7b925219164d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.313883] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8ce6dd-67a1-4dda-8698-50f663d491b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.320397] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea6eeff-4913-45c8-868a-b7c402af07fe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.332833] env[70020]: DEBUG nova.compute.provider_tree [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.835919] env[70020]: DEBUG nova.scheduler.client.report [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1334.340667] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.360176] env[70020]: INFO nova.scheduler.client.report [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Deleted allocations for instance e96aae17-5ae5-404b-bbe3-46777f7c34d2 [ 1334.868475] env[70020]: DEBUG oslo_concurrency.lockutils [None req-7eed8cfc-65df-4ddd-befa-e9aaff693f9d tempest-AttachInterfacesTestJSON-1580231832 tempest-AttachInterfacesTestJSON-1580231832-project-member] Lock "e96aae17-5ae5-404b-bbe3-46777f7c34d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.650s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.008615] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.009048] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.512114] env[70020]: INFO nova.compute.manager [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Detaching volume fd719fc4-e0af-4335-b966-f7758bcc701e [ 1339.540956] env[70020]: INFO nova.virt.block_device [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Attempting to driver detach volume fd719fc4-e0af-4335-b966-f7758bcc701e from mountpoint /dev/sdb [ 1339.541206] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1339.541386] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721844', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'name': 'volume-fd719fc4-e0af-4335-b966-f7758bcc701e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '399d55b7-2a79-4849-89b6-ff8d1c0d33e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'serial': 'fd719fc4-e0af-4335-b966-f7758bcc701e'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1339.542251] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b4ab3a-ca6d-471c-89b0-71519eaaffaf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.563570] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da67f54-fce7-4bbb-a2f0-dbfda16427af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.569950] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfdb261-3cca-4504-a661-de7493429229 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.589297] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ee385d-09d7-463d-b63b-c3c28b7d338d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.602925] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] The volume has not been displaced from its original location: [datastore2] volume-fd719fc4-e0af-4335-b966-f7758bcc701e/volume-fd719fc4-e0af-4335-b966-f7758bcc701e.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1339.608180] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1339.608425] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fb630f1-0b9a-4398-91ac-ca78ccb8768d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.625200] env[70020]: DEBUG oslo_vmware.api [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1339.625200] env[70020]: value = "task-3619349" [ 1339.625200] env[70020]: _type = "Task" [ 1339.625200] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.635127] env[70020]: DEBUG oslo_vmware.api [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619349, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.134974] env[70020]: DEBUG oslo_vmware.api [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619349, 'name': ReconfigVM_Task, 'duration_secs': 0.212832} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.135368] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1340.139872] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-515ab29f-16a4-4cf2-a1b9-2a675fffbf4f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.154074] env[70020]: DEBUG oslo_vmware.api [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1340.154074] env[70020]: value = "task-3619350" [ 1340.154074] env[70020]: _type = "Task" [ 1340.154074] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.161161] env[70020]: DEBUG oslo_vmware.api [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619350, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.665071] env[70020]: DEBUG oslo_vmware.api [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619350, 'name': ReconfigVM_Task, 'duration_secs': 0.139021} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.665357] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721844', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'name': 'volume-fd719fc4-e0af-4335-b966-f7758bcc701e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '399d55b7-2a79-4849-89b6-ff8d1c0d33e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd719fc4-e0af-4335-b966-f7758bcc701e', 'serial': 'fd719fc4-e0af-4335-b966-f7758bcc701e'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1341.204883] env[70020]: DEBUG nova.objects.instance [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'flavor' on Instance uuid 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1342.212438] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5df8952d-0f74-421f-85dc-0213252ad684 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.203s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.251553] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.251922] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.252040] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.252212] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.252380] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.254531] env[70020]: INFO nova.compute.manager [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Terminating instance [ 1343.757869] env[70020]: DEBUG nova.compute.manager [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1343.758150] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1343.759757] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72bff84-f2c4-4b6d-be4b-d7ebf68b610b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.766901] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1343.767144] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7cf605d-ffc3-4601-8d4c-a9998f9a95b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.773559] env[70020]: DEBUG oslo_vmware.api [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1343.773559] env[70020]: value = "task-3619351" [ 1343.773559] env[70020]: _type = "Task" [ 1343.773559] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.781554] env[70020]: DEBUG oslo_vmware.api [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619351, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.283318] env[70020]: DEBUG oslo_vmware.api [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619351, 'name': PowerOffVM_Task, 'duration_secs': 0.173578} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.283676] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1344.283742] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1344.283984] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9257d8e9-437a-4919-ab01-d48a19134de0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.350026] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1344.350256] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1344.350435] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleting the datastore file [datastore2] 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.350685] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1e15595-3dce-4c3c-ba4e-1ae68851ce52 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.356946] env[70020]: DEBUG oslo_vmware.api [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1344.356946] 
env[70020]: value = "task-3619353" [ 1344.356946] env[70020]: _type = "Task" [ 1344.356946] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.363875] env[70020]: DEBUG oslo_vmware.api [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.867215] env[70020]: DEBUG oslo_vmware.api [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129165} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.867452] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1344.867632] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1344.867797] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1344.868024] env[70020]: INFO nova.compute.manager [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1344.868273] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1344.868459] env[70020]: DEBUG nova.compute.manager [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1344.868554] env[70020]: DEBUG nova.network.neutron [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1345.300679] env[70020]: DEBUG nova.compute.manager [req-70a4e9d7-6f82-48c9-b882-5a7d570ca9f0 req-29afd867-7f05-4753-b0fa-0682c1da2a61 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Received event network-vif-deleted-b3b2c85d-9fe6-403f-bc6d-d003d2a06aef {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1345.300679] env[70020]: INFO nova.compute.manager [req-70a4e9d7-6f82-48c9-b882-5a7d570ca9f0 req-29afd867-7f05-4753-b0fa-0682c1da2a61 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Neutron deleted interface b3b2c85d-9fe6-403f-bc6d-d003d2a06aef; detaching it from the instance and deleting it from the info cache [ 1345.300679] env[70020]: DEBUG nova.network.neutron [req-70a4e9d7-6f82-48c9-b882-5a7d570ca9f0 req-29afd867-7f05-4753-b0fa-0682c1da2a61 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.775137] env[70020]: DEBUG nova.network.neutron [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.802540] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dde127b-8027-4582-9742-b17492c9949e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.812895] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88915f97-e1d1-4780-a11a-d4e745fa42d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.837490] env[70020]: DEBUG nova.compute.manager [req-70a4e9d7-6f82-48c9-b882-5a7d570ca9f0 req-29afd867-7f05-4753-b0fa-0682c1da2a61 service nova] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Detach interface failed, port_id=b3b2c85d-9fe6-403f-bc6d-d003d2a06aef, reason: Instance 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1346.278190] env[70020]: INFO nova.compute.manager [-] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Took 1.41 seconds to deallocate network for instance. 
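A large share of the records in this section are oslo_concurrency.lockutils messages timing how long a named lock was waited on and then held, for example "compute_resources" acquired by ResourceTracker.update_usage after waiting 0.000s and released after being held 1.621s. The sketch below is a simplified, standard-library-only illustration of that acquire/wait/held accounting; it is not oslo.concurrency's implementation, and the usage line is a made-up example mirroring the resource-tracker records around this point.

```python
# Simplified illustration of the named-lock timing reported by the
# lockutils records in this log ("acquired ... waited 0.000s",
# '"released" ... held 1.621s'). Not oslo.concurrency's implementation.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one lock object per lock name


@contextmanager
def timed_lock(name, caller):
    lock = _locks[name]
    start = time.monotonic()
    print(f'Acquiring lock "{name}" by "{caller}"')
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


# Hypothetical usage mirroring the resource-tracker records in this section:
with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.01)  # stand-in for updating usage / provider inventory
```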
[ 1346.784914] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.785307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.785466] env[70020]: DEBUG nova.objects.instance [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'resources' on Instance uuid 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.327063] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff54657-02cb-4643-9b3a-106d6ace5e70 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.334516] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed651ae-de1c-4dff-87df-44ef42949a65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.363561] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69143550-062c-409f-861d-876d9c0f3534 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.370185] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fad39f-68ec-4d2f-82e2-46db9eeef96a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.384271] env[70020]: DEBUG nova.compute.provider_tree [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.886958] env[70020]: DEBUG nova.scheduler.client.report [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1348.393079] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.410114] env[70020]: INFO nova.scheduler.client.report [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted allocations for instance 399d55b7-2a79-4849-89b6-ff8d1c0d33e1 [ 1348.918462] env[70020]: DEBUG oslo_concurrency.lockutils [None req-0049cb66-5367-43f2-bf14-dea4bc899e14 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "399d55b7-2a79-4849-89b6-ff8d1c0d33e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.666s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.109757] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1351.110038] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1351.614050] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1352.135891] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.136201] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.137744] env[70020]: INFO nova.compute.claims [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1353.180942] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065f5daa-48ba-4449-9378-eb0b9ab32a7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.188713] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5749e5-87bc-4177-a544-b74c51e5455b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.219761] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8029b36-d120-4d6e-bdbc-5e01013b7fef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.226329] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5189f1e-548e-4fac-86da-c6ae749faeaa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.238975] env[70020]: DEBUG nova.compute.provider_tree [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1353.742296] env[70020]: DEBUG nova.scheduler.client.report [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1354.246728] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.247279] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1354.752178] env[70020]: DEBUG nova.compute.utils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1354.753672] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1354.753840] env[70020]: DEBUG nova.network.neutron [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1354.804044] env[70020]: DEBUG nova.policy [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b241829c09b0497f9f30f85c2d2fe85a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11384e127368415d82f2e8a7e985b17e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1355.061623] env[70020]: DEBUG nova.network.neutron [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Successfully created port: d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1355.257095] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1356.267156] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1356.292163] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1356.292410] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.292563] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1356.292741] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.292882] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1356.293039] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1356.293255] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1356.293410] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1356.293573] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1356.293732] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1356.293901] env[70020]: DEBUG nova.virt.hardware [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1356.294772] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb199b7-127f-4602-b06a-97a6b3f10b8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.304908] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9402e9a-68f1-462a-a6bd-3320c481b906 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.402341] env[70020]: DEBUG nova.compute.manager [req-e760a67e-019f-4f91-b443-ae28a91ef297 req-19a6c311-40cf-4070-9a97-90c9f3ad5bc9 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-vif-plugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1356.402518] env[70020]: DEBUG oslo_concurrency.lockutils [req-e760a67e-019f-4f91-b443-ae28a91ef297 req-19a6c311-40cf-4070-9a97-90c9f3ad5bc9 service nova] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.402724] env[70020]: DEBUG oslo_concurrency.lockutils [req-e760a67e-019f-4f91-b443-ae28a91ef297 req-19a6c311-40cf-4070-9a97-90c9f3ad5bc9 service nova] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.402887] env[70020]: DEBUG oslo_concurrency.lockutils [req-e760a67e-019f-4f91-b443-ae28a91ef297 req-19a6c311-40cf-4070-9a97-90c9f3ad5bc9 service nova] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.403057] env[70020]: DEBUG nova.compute.manager [req-e760a67e-019f-4f91-b443-ae28a91ef297 req-19a6c311-40cf-4070-9a97-90c9f3ad5bc9 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] No waiting events found dispatching network-vif-plugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1356.403222] env[70020]: WARNING nova.compute.manager [req-e760a67e-019f-4f91-b443-ae28a91ef297 req-19a6c311-40cf-4070-9a97-90c9f3ad5bc9 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received unexpected event network-vif-plugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 for instance with vm_state building and task_state spawning. [ 1356.484083] env[70020]: DEBUG nova.network.neutron [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Successfully updated port: d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1356.987126] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.987260] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.987418] env[70020]: DEBUG nova.network.neutron [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1357.519107] env[70020]: DEBUG nova.network.neutron [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1357.633043] env[70020]: DEBUG nova.network.neutron [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.135664] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.135994] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Instance network_info: |[{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1358.136443] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:7c:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'af454577-0e89-41a3-a9f2-f39716f62fd5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6a9de80-b7ec-45e7-9bad-0e115b06a3b9', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.143966] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.144179] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1358.144410] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e70072c2-2138-422c-a91f-b3a0abdd757c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.164866] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1358.164866] env[70020]: value = "task-3619354" [ 1358.164866] env[70020]: _type = "Task" [ 1358.164866] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.173215] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619354, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.427673] env[70020]: DEBUG nova.compute.manager [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1358.427881] env[70020]: DEBUG nova.compute.manager [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing instance network info cache due to event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1358.428153] env[70020]: DEBUG oslo_concurrency.lockutils [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.428305] env[70020]: DEBUG oslo_concurrency.lockutils [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1358.428462] env[70020]: DEBUG nova.network.neutron [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1358.674296] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619354, 'name': CreateVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.102127] env[70020]: DEBUG nova.network.neutron [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updated VIF entry in instance network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.102469] env[70020]: DEBUG nova.network.neutron [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.175026] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619354, 'name': CreateVM_Task, 'duration_secs': 0.902598} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.175283] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1359.175955] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.176157] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.176484] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1359.177023] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c1057ff-1e05-4992-814f-d7f21ef6303c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.181311] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1359.181311] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ef2a95-40fc-db69-b4a1-e746fbbd707f" [ 1359.181311] env[70020]: _type = "Task" [ 1359.181311] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.188480] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ef2a95-40fc-db69-b4a1-e746fbbd707f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.605773] env[70020]: DEBUG oslo_concurrency.lockutils [req-eaaa1b56-d82a-47a6-b4fd-cc9e948cbef7 req-384da119-4ad0-4de4-8c31-bf080487c67a service nova] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.694074] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ef2a95-40fc-db69-b4a1-e746fbbd707f, 'name': SearchDatastore_Task, 'duration_secs': 0.010818} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.694074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.694074] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1359.694074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.694518] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.694518] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1359.694576] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb469483-69c7-4064-a7da-29be62e14856 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.702739] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1359.702901] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1359.703564] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf2be778-76fa-4de9-8495-7ef3c1bccaac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.708331] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1359.708331] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52471f5d-5d64-0aea-ee09-c89e6e193423" [ 1359.708331] env[70020]: _type = "Task" [ 1359.708331] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.715313] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52471f5d-5d64-0aea-ee09-c89e6e193423, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.218897] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52471f5d-5d64-0aea-ee09-c89e6e193423, 'name': SearchDatastore_Task, 'duration_secs': 0.009274} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.219691] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96adce99-3a6e-43a0-87a5-297c94f9c368 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.224609] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1360.224609] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fff536-ee48-3f4b-eb97-aa5bdc7f085b" [ 1360.224609] env[70020]: _type = "Task" [ 1360.224609] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.231818] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fff536-ee48-3f4b-eb97-aa5bdc7f085b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.734812] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fff536-ee48-3f4b-eb97-aa5bdc7f085b, 'name': SearchDatastore_Task, 'duration_secs': 0.009468} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.735178] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.735323] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1360.735573] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfa369e7-b651-4421-94ef-eb350f57177a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.741972] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1360.741972] env[70020]: value = "task-3619355" [ 1360.741972] env[70020]: _type = "Task" [ 1360.741972] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.748784] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619355, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.251397] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619355, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.408735} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.251659] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1361.251862] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1361.252115] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b47cd784-a07f-4884-9480-bac22b8ddb12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.258799] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1361.258799] env[70020]: value = "task-3619356" [ 1361.258799] env[70020]: _type = "Task" [ 1361.258799] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.265704] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619356, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.768316] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619356, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067496} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.768701] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1361.769324] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9318daf6-1051-47b4-95fe-e313643ca451 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.790301] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1361.790513] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af7cbbd4-5625-4621-8123-708d2f28da05 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.809033] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1361.809033] env[70020]: value = "task-3619357" [ 1361.809033] env[70020]: _type = "Task" [ 1361.809033] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.816009] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.318842] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619357, 'name': ReconfigVM_Task, 'duration_secs': 0.262217} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.319187] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.319743] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7976356d-794e-4546-b385-bf47fee7e14b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.326854] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1362.326854] env[70020]: value = "task-3619358" [ 1362.326854] env[70020]: _type = "Task" [ 1362.326854] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.333940] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619358, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.836421] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619358, 'name': Rename_Task, 'duration_secs': 0.305776} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.836781] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1362.836876] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dbc5783-f7f0-42ec-8dec-310fd3fe15b9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.843476] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1362.843476] env[70020]: value = "task-3619359" [ 1362.843476] env[70020]: _type = "Task" [ 1362.843476] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.853360] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.353617] env[70020]: DEBUG oslo_vmware.api [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619359, 'name': PowerOnVM_Task, 'duration_secs': 0.428946} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.353914] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1363.354131] env[70020]: INFO nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Took 7.09 seconds to spawn the instance on the hypervisor. [ 1363.354288] env[70020]: DEBUG nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1363.355044] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cf0640-e8b3-4245-a638-2b4f6f599172 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.874277] env[70020]: INFO nova.compute.manager [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Took 11.76 seconds to build instance. [ 1364.276906] env[70020]: DEBUG nova.compute.manager [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1364.277148] env[70020]: DEBUG nova.compute.manager [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing instance network info cache due to event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1364.277336] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.277479] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.277730] env[70020]: DEBUG nova.network.neutron [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1364.376785] env[70020]: DEBUG oslo_concurrency.lockutils [None req-8e69aa51-7735-404b-b532-e37e58f21689 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.267s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.973697] env[70020]: DEBUG nova.network.neutron [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updated VIF entry in instance network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.974096] env[70020]: DEBUG nova.network.neutron [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.476515] env[70020]: DEBUG oslo_concurrency.lockutils [req-0a787df5-f196-43a0-bd87-a26cc29d5a36 req-01b4545b-af2f-465b-b632-98f273c65b93 service nova] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1367.225770] env[70020]: DEBUG nova.compute.manager [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1367.226861] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bb9efd-b7ad-4da1-819c-06499bf52d9d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.736663] env[70020]: INFO nova.compute.manager [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] instance snapshotting [ 1367.737314] env[70020]: DEBUG nova.objects.instance [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'flavor' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.242815] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b4574d-66d5-4214-b091-075224e57043 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.262452] 
env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2fda3b-c647-4752-a50c-06ce39c55a3b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.772450] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1368.772772] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ec4bdb62-8b9d-4368-844c-126faa5663cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.780154] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1368.780154] env[70020]: value = "task-3619360" [ 1368.780154] env[70020]: _type = "Task" [ 1368.780154] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.787449] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619360, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.290032] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619360, 'name': CreateSnapshot_Task, 'duration_secs': 0.462307} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.290462] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1369.291140] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61010514-fefd-448b-a69f-5ae4843e2916 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.808907] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1369.809253] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6bc26e32-b8c3-4e96-b362-aa583a3ce54b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.818482] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1369.818482] env[70020]: value = "task-3619361" [ 1369.818482] env[70020]: _type = "Task" [ 1369.818482] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.825914] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619361, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.328013] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619361, 'name': CloneVM_Task} progress is 93%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.829326] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619361, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.332160] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619361, 'name': CloneVM_Task, 'duration_secs': 1.367429} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.332525] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created linked-clone VM from snapshot [ 1371.333542] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc656a6-401a-4964-ae8b-ce177b62b7bf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.342684] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Uploading image c87219f6-2a31-43b9-a4f3-aceca50b8e81 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1371.362011] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1371.362011] env[70020]: value = "vm-721852" [ 1371.362011] env[70020]: _type = "VirtualMachine" [ 1371.362011] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1371.362263] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8bbf2bb3-daca-4889-9c8f-12688f04e0f4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.368508] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease: (returnval){ [ 1371.368508] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a4bdd-6f26-e602-c14a-f69dbed6c1e7" [ 1371.368508] env[70020]: _type = "HttpNfcLease" [ 1371.368508] env[70020]: } obtained for exporting VM: (result){ [ 1371.368508] env[70020]: value = "vm-721852" [ 1371.368508] env[70020]: _type = "VirtualMachine" [ 1371.368508] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1371.368708] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the lease: (returnval){ [ 1371.368708] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a4bdd-6f26-e602-c14a-f69dbed6c1e7" [ 1371.368708] env[70020]: _type = "HttpNfcLease" [ 1371.368708] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1371.374465] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1371.374465] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a4bdd-6f26-e602-c14a-f69dbed6c1e7" [ 1371.374465] env[70020]: _type = "HttpNfcLease" [ 1371.374465] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1371.876853] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1371.876853] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a4bdd-6f26-e602-c14a-f69dbed6c1e7" [ 1371.876853] env[70020]: _type = "HttpNfcLease" [ 1371.876853] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1371.877143] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1371.877143] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523a4bdd-6f26-e602-c14a-f69dbed6c1e7" [ 1371.877143] env[70020]: _type = "HttpNfcLease" [ 1371.877143] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1371.877818] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df5658e-eeee-491f-af51-d9f6d12f3b55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.884372] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5216d79a-3f4d-54f9-e335-b3b9fd12c04e/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1371.884536] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5216d79a-3f4d-54f9-e335-b3b9fd12c04e/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1371.972062] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-417db954-96cd-43db-9c2e-94483aa08281 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.758953] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5216d79a-3f4d-54f9-e335-b3b9fd12c04e/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1379.759904] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373c5069-7ffc-4524-96e3-e93252f74f53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.766017] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5216d79a-3f4d-54f9-e335-b3b9fd12c04e/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1379.766198] env[70020]: ERROR oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5216d79a-3f4d-54f9-e335-b3b9fd12c04e/disk-0.vmdk due to incomplete transfer. [ 1379.766444] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-15805749-3ed0-4219-91bc-809c5dacf21a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.774117] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5216d79a-3f4d-54f9-e335-b3b9fd12c04e/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1379.774309] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Uploaded image c87219f6-2a31-43b9-a4f3-aceca50b8e81 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1379.776737] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1379.777052] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7b7c205c-4522-43e7-af8a-fddfba7c053f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.782504] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1379.782504] env[70020]: value = "task-3619363" [ 1379.782504] env[70020]: _type = "Task" [ 1379.782504] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.791074] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619363, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.292122] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619363, 'name': Destroy_Task, 'duration_secs': 0.334341} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.292346] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroyed the VM [ 1380.292633] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1380.292876] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ca4ebdff-91b0-40a0-ad0e-82e803da4736 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.299458] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1380.299458] env[70020]: value = "task-3619364" [ 1380.299458] env[70020]: _type = "Task" [ 1380.299458] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.306454] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619364, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.808917] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619364, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.309604] env[70020]: DEBUG oslo_vmware.api [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619364, 'name': RemoveSnapshot_Task, 'duration_secs': 0.521328} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.309885] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1381.310092] env[70020]: INFO nova.compute.manager [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 13.07 seconds to snapshot the instance on the hypervisor. 
[ 1381.844884] env[70020]: DEBUG nova.compute.manager [None req-bb60a989-0045-4529-a10f-ced52292a686 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Found 1 images (rotation: 2) {{(pid=70020) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1383.089561] env[70020]: DEBUG nova.compute.manager [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1383.090944] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a355b3-8b47-45e5-908e-2458e66758e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.601815] env[70020]: INFO nova.compute.manager [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] instance snapshotting [ 1383.602480] env[70020]: DEBUG nova.objects.instance [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'flavor' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1384.109067] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d59a247-ad63-4b0b-9f04-b2887521b9be {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.127511] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89460f4c-2d9e-442a-8cbf-285820d6e08f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.136188] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.640086] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1384.643519] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.643870] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.644140] env[70020]: DEBUG 
oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.644373] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1384.644757] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-91df5fce-1aa5-4dab-a506-aedea4d49209 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.648799] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5ab301-d874-4665-9049-2b1a4dbd440d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.660467] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54525224-9734-498d-95b4-8f5e419caeb7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.665057] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1384.665057] env[70020]: value = "task-3619365" [ 1384.665057] env[70020]: _type = "Task" [ 1384.665057] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.685058] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c98fb6-8391-40df-a908-169ac965ba24 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.692492] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619365, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.698441] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c52a7b9-0b1e-4a56-af87-f975410bb865 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.745932] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180275MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1384.746239] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.746565] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.175790] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619365, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.675922] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619365, 'name': CreateSnapshot_Task, 'duration_secs': 0.597719} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.676202] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1385.676906] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccf2431-3182-4c30-9d92-682214309d68 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.770331] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 422aa98b-fa01-42c5-90cf-ed70e9781208 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1385.770483] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 3dc276f3-3f80-4732-a76d-560e6b057a56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1385.770690] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1385.770821] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1385.804957] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221df133-b0cc-4e67-81c1-87b8ee8f6e88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.812460] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfd9d37-35f2-4c4d-a375-81dc459ff632 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.841418] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f9dab1-d7ad-4e5a-9bf7-00d90fd212b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.848175] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9eaf6d4-715c-404a-a44d-de2415e23cec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.860777] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.194352] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1386.194761] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-adb37656-0a45-4482-b7ed-77ef3cc45d3b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.203236] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1386.203236] env[70020]: value = "task-3619366" [ 1386.203236] env[70020]: _type = "Task" [ 
1386.203236] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.210747] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619366, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.363997] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.712685] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619366, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.868847] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1386.869064] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.122s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.213104] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619366, 'name': CloneVM_Task, 'duration_secs': 0.88818} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.213545] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created linked-clone VM from snapshot [ 1387.214157] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2572a0-084b-4a9f-ae2c-5bad76abc459 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.220935] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Uploading image ba41a3c0-334a-43e8-a94d-973a199bb5ca {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1387.240701] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1387.240701] env[70020]: value = "vm-721854" [ 1387.240701] env[70020]: _type = "VirtualMachine" [ 1387.240701] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1387.240930] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-99478c85-4d96-49fa-9a89-87b866b4941c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.247192] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease: (returnval){ [ 1387.247192] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522df656-ddf7-9f91-247d-1ba9d0b4a89a" [ 1387.247192] env[70020]: _type = "HttpNfcLease" [ 1387.247192] env[70020]: } obtained for exporting VM: (result){ [ 1387.247192] env[70020]: value = "vm-721854" [ 1387.247192] env[70020]: _type = "VirtualMachine" [ 1387.247192] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1387.247393] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the lease: (returnval){ [ 1387.247393] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522df656-ddf7-9f91-247d-1ba9d0b4a89a" [ 1387.247393] env[70020]: _type = "HttpNfcLease" [ 1387.247393] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1387.253159] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1387.253159] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522df656-ddf7-9f91-247d-1ba9d0b4a89a" [ 1387.253159] env[70020]: _type = "HttpNfcLease" [ 1387.253159] env[70020]: } is initializing. 
{{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1387.755200] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1387.755200] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522df656-ddf7-9f91-247d-1ba9d0b4a89a" [ 1387.755200] env[70020]: _type = "HttpNfcLease" [ 1387.755200] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1387.755487] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1387.755487] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522df656-ddf7-9f91-247d-1ba9d0b4a89a" [ 1387.755487] env[70020]: _type = "HttpNfcLease" [ 1387.755487] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1387.756231] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178a3f1b-0fee-4453-9207-f253e0e497e0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.763431] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52817d1b-0a3e-ccf6-521a-e66dca0f4493/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1387.763590] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52817d1b-0a3e-ccf6-521a-e66dca0f4493/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1387.847472] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6df4720d-15ca-4555-a280-49b6542a6cb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.863770] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.864731] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.864731] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.865110] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.865528] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.865875] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.866280] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1394.954648] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52817d1b-0a3e-ccf6-521a-e66dca0f4493/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1394.955589] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589b26b8-83c4-4d55-b0cf-96e42c304045 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.961771] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52817d1b-0a3e-ccf6-521a-e66dca0f4493/disk-0.vmdk is in state: ready. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1394.961933] env[70020]: ERROR oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52817d1b-0a3e-ccf6-521a-e66dca0f4493/disk-0.vmdk due to incomplete transfer. [ 1394.962165] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-baada5d8-56ad-4ffc-99f7-09e3a102de4d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.968889] env[70020]: DEBUG oslo_vmware.rw_handles [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52817d1b-0a3e-ccf6-521a-e66dca0f4493/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1394.969093] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Uploaded image ba41a3c0-334a-43e8-a94d-973a199bb5ca to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1394.971450] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1394.971675] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7d4a8e82-beca-4bac-8c74-f3526ad4983e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.976938] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1394.976938] env[70020]: value = "task-3619368" [ 1394.976938] env[70020]: _type = "Task" [ 1394.976938] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.984022] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619368, 'name': Destroy_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.135845] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.487070] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619368, 'name': Destroy_Task, 'duration_secs': 0.407508} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.487323] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroyed the VM [ 1395.487561] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1395.487795] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dbba3ab2-bdaa-4136-b5a2-83c124eef504 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.493455] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1395.493455] env[70020]: value = "task-3619369" [ 1395.493455] env[70020]: _type = "Task" [ 1395.493455] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.500484] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619369, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.003268] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619369, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.504602] env[70020]: DEBUG oslo_vmware.api [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619369, 'name': RemoveSnapshot_Task, 'duration_secs': 0.630545} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.504855] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1396.505074] env[70020]: INFO nova.compute.manager [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 12.40 seconds to snapshot the instance on the hypervisor. [ 1397.058716] env[70020]: DEBUG nova.compute.manager [None req-0b571608-43ec-43b5-8492-27e5832182e0 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Found 2 images (rotation: 2) {{(pid=70020) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1397.130846] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.924436] env[70020]: DEBUG nova.compute.manager [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1397.925353] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06201267-9711-4ab5-9896-2b269f228a60 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.436072] env[70020]: INFO nova.compute.manager [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] instance snapshotting [ 1398.436726] env[70020]: DEBUG nova.objects.instance [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'flavor' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1398.942562] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958d31d7-dc3f-4f96-b036-b97cc1da93d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.961537] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6af69e-ba59-4cbd-bfd5-a7b1d0ae83e7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.472057] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating Snapshot of the VM instance {{(pid=70020) 
_create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1399.472379] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-81dabfc0-ab86-4314-8d01-537649427c88 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.480267] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1399.480267] env[70020]: value = "task-3619370" [ 1399.480267] env[70020]: _type = "Task" [ 1399.480267] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.487981] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619370, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.989945] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619370, 'name': CreateSnapshot_Task, 'duration_secs': 0.40721} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.990208] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1399.990916] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d6ae79-8c30-4da9-87e7-a43ec3e5fcbe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.507633] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1400.507974] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8cb0f3d7-f912-4da1-9295-cf94bc8da968 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.517134] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1400.517134] env[70020]: value = "task-3619371" [ 1400.517134] env[70020]: _type = "Task" [ 1400.517134] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.524935] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619371, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.030952] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619371, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.527629] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619371, 'name': CloneVM_Task, 'duration_secs': 0.902639} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.527916] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Created linked-clone VM from snapshot [ 1401.528663] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b17ddf-0d6c-4be1-98a4-bf0b7d95ec27 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.537155] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Uploading image 3dff7d67-9ffa-405b-b757-e35457283a71 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1401.560900] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1401.560900] env[70020]: value = "vm-721856" [ 1401.560900] env[70020]: _type = "VirtualMachine" [ 1401.560900] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1401.561143] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-49eb9c32-c340-449a-b76b-d501b5b02787 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.567026] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease: (returnval){ [ 1401.567026] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5248ccb1-031a-1d2e-2038-8bfa46a55770" [ 1401.567026] env[70020]: _type = "HttpNfcLease" [ 1401.567026] env[70020]: } obtained for exporting VM: (result){ [ 1401.567026] env[70020]: value = "vm-721856" [ 1401.567026] env[70020]: _type = "VirtualMachine" [ 1401.567026] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1401.567265] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the lease: (returnval){ [ 1401.567265] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5248ccb1-031a-1d2e-2038-8bfa46a55770" [ 1401.567265] env[70020]: _type = "HttpNfcLease" [ 1401.567265] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1401.572907] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1401.572907] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5248ccb1-031a-1d2e-2038-8bfa46a55770" [ 1401.572907] env[70020]: _type = "HttpNfcLease" [ 1401.572907] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1402.076437] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1402.076437] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5248ccb1-031a-1d2e-2038-8bfa46a55770" [ 1402.076437] env[70020]: _type = "HttpNfcLease" [ 1402.076437] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1402.076702] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1402.076702] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5248ccb1-031a-1d2e-2038-8bfa46a55770" [ 1402.076702] env[70020]: _type = "HttpNfcLease" [ 1402.076702] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1402.077426] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6547affd-f300-4a0c-91ad-e36baabdce91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.084229] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a0c64-d364-12cf-0154-e9830a2d2058/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1402.084398] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a0c64-d364-12cf-0154-e9830a2d2058/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1402.169133] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1354307f-e579-4060-9de6-c51cc956f33b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.599331] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.601031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.601031] env[70020]: INFO nova.compute.manager [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Shelving [ 1404.610468] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.610845] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b36a39f-9797-4620-9d04-14b58db2b0f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.619262] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1404.619262] env[70020]: value = "task-3619373" [ 1404.619262] env[70020]: _type = "Task" [ 1404.619262] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.628177] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.129593] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619373, 'name': PowerOffVM_Task, 'duration_secs': 0.198031} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.129850] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.130656] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d0df51-b2d0-47cd-b4f3-866a696f7da2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.149437] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1195c524-280a-432c-8c97-db648b940fb7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.660999] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1405.661374] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c0cfa24a-58e9-4505-b9bc-55d781b21188 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.669727] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1405.669727] env[70020]: value = "task-3619374" [ 1405.669727] env[70020]: _type = "Task" [ 1405.669727] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.679686] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619374, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.179839] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619374, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.680670] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619374, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.181699] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619374, 'name': CreateSnapshot_Task, 'duration_secs': 1.329945} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.181969] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1407.182740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b733c906-d93a-4052-a8a9-2618ebdaabb5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.701161] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1407.701161] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-790c44be-e91a-472d-b92f-6aaabe244d3f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.710117] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1407.710117] env[70020]: value = "task-3619375" [ 1407.710117] env[70020]: _type = "Task" [ 1407.710117] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.718440] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619375, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.220336] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619375, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.720577] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619375, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.222011] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619375, 'name': CloneVM_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.723016] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619375, 'name': CloneVM_Task} progress is 95%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.224923] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619375, 'name': CloneVM_Task, 'duration_secs': 2.131461} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.225267] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Created linked-clone VM from snapshot [ 1410.226097] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbc49e6-2a8b-4c0b-8247-575cd84746d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.233577] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Uploading image c586dd81-3de4-4742-bffd-494d74a05131 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1410.255863] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1410.255863] env[70020]: value = "vm-721858" [ 1410.255863] env[70020]: _type = "VirtualMachine" [ 1410.255863] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1410.256175] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-57a2e907-2e1b-40fb-8101-a968dd240305 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.262938] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease: (returnval){ [ 1410.262938] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b34260-936f-567a-70c2-5eca6b5db2a5" [ 1410.262938] env[70020]: _type = "HttpNfcLease" [ 1410.262938] env[70020]: } obtained for exporting VM: (result){ [ 1410.262938] env[70020]: value = "vm-721858" [ 1410.262938] env[70020]: _type = "VirtualMachine" [ 1410.262938] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1410.263178] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the lease: (returnval){ [ 1410.263178] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b34260-936f-567a-70c2-5eca6b5db2a5" [ 1410.263178] env[70020]: _type = "HttpNfcLease" [ 1410.263178] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1410.269707] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1410.269707] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b34260-936f-567a-70c2-5eca6b5db2a5" [ 1410.269707] env[70020]: _type = "HttpNfcLease" [ 1410.269707] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1410.661255] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a0c64-d364-12cf-0154-e9830a2d2058/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1410.662180] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4edf082-d20f-43e7-81e7-cdc76c7fcea3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.668297] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a0c64-d364-12cf-0154-e9830a2d2058/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1410.668460] env[70020]: ERROR oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a0c64-d364-12cf-0154-e9830a2d2058/disk-0.vmdk due to incomplete transfer. 
[ 1410.668682] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-80ed50a0-c8c8-45c4-8e21-1a09fc9896d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.675378] env[70020]: DEBUG oslo_vmware.rw_handles [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a0c64-d364-12cf-0154-e9830a2d2058/disk-0.vmdk. {{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1410.675564] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Uploaded image 3dff7d67-9ffa-405b-b757-e35457283a71 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1410.677959] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1410.678185] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c06ae064-619b-4185-a678-50da054275e8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.683989] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1410.683989] env[70020]: value = "task-3619377" [ 1410.683989] env[70020]: _type = "Task" [ 1410.683989] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.691189] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619377, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.771764] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1410.771764] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b34260-936f-567a-70c2-5eca6b5db2a5" [ 1410.771764] env[70020]: _type = "HttpNfcLease" [ 1410.771764] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1410.772252] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1410.772252] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52b34260-936f-567a-70c2-5eca6b5db2a5" [ 1410.772252] env[70020]: _type = "HttpNfcLease" [ 1410.772252] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1410.772769] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc38b85-881b-4015-99b5-2bafe6971795 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.779881] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b651e-3f05-5eae-73f5-c4eee68622e0/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1410.780058] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b651e-3f05-5eae-73f5-c4eee68622e0/disk-0.vmdk for reading. {{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1410.872516] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5c50784-5937-40fb-8c68-e887815aef13 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.195216] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619377, 'name': Destroy_Task, 'duration_secs': 0.33823} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.195481] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroyed the VM [ 1411.195707] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1411.195980] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3e3415a9-e54b-48d1-8a52-54a9aec485cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.202214] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1411.202214] env[70020]: value = "task-3619378" [ 1411.202214] env[70020]: _type = "Task" [ 1411.202214] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.210065] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619378, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.711950] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619378, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.212494] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619378, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.716031] env[70020]: DEBUG oslo_vmware.api [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619378, 'name': RemoveSnapshot_Task, 'duration_secs': 1.088722} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.716031] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1412.716352] env[70020]: INFO nova.compute.manager [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 13.77 seconds to snapshot the instance on the hypervisor. 
[ 1413.262084] env[70020]: DEBUG nova.compute.manager [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Found 3 images (rotation: 2) {{(pid=70020) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1413.262396] env[70020]: DEBUG nova.compute.manager [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Rotating out 1 backups {{(pid=70020) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1413.262496] env[70020]: DEBUG nova.compute.manager [None req-b9956c07-61b0-4654-a715-8c5fcb3486eb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleting image c87219f6-2a31-43b9-a4f3-aceca50b8e81 {{(pid=70020) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1414.492014] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.492014] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.492396] env[70020]: DEBUG nova.compute.manager [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1414.493330] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff0b9e5-e47e-4c93-911f-84d35952362f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.501674] env[70020]: DEBUG nova.compute.manager [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=70020) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1414.502311] env[70020]: DEBUG nova.objects.instance [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'flavor' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.509933] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb710841-2204-443d-bb83-d0cc258c96d4 
tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1415.510328] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcacd5b4-6c24-48db-a732-091b29b051da {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.517383] env[70020]: DEBUG oslo_vmware.api [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1415.517383] env[70020]: value = "task-3619379" [ 1415.517383] env[70020]: _type = "Task" [ 1415.517383] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.525855] env[70020]: DEBUG oslo_vmware.api [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619379, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.028301] env[70020]: DEBUG oslo_vmware.api [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619379, 'name': PowerOffVM_Task, 'duration_secs': 0.229277} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.028599] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1416.028817] env[70020]: DEBUG nova.compute.manager [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1416.029707] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afa47be-f98c-44ad-a9bc-13289a0fa5a4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.542100] env[70020]: DEBUG oslo_concurrency.lockutils [None req-fb710841-2204-443d-bb83-d0cc258c96d4 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.745546] env[70020]: DEBUG nova.compute.manager [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Stashing vm_state: stopped {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1418.265119] 
env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.265406] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.303862] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b651e-3f05-5eae-73f5-c4eee68622e0/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1418.304712] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7e99f4-418f-488a-94f2-fde0cbcc76bd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.311340] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b651e-3f05-5eae-73f5-c4eee68622e0/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1418.311499] env[70020]: ERROR oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b651e-3f05-5eae-73f5-c4eee68622e0/disk-0.vmdk due to incomplete transfer. [ 1418.311709] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-611a9229-5f1f-4488-b50b-04b5c4e7c9cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.318637] env[70020]: DEBUG oslo_vmware.rw_handles [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b651e-3f05-5eae-73f5-c4eee68622e0/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1418.318859] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Uploaded image c586dd81-3de4-4742-bffd-494d74a05131 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1418.321169] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1418.321387] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-632be2d3-93fa-43c2-8282-ecc13f79bf35 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.327504] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1418.327504] env[70020]: value = "task-3619380" [ 1418.327504] env[70020]: _type = "Task" [ 1418.327504] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.334932] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619380, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.770398] env[70020]: INFO nova.compute.claims [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1418.836416] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619380, 'name': Destroy_Task, 'duration_secs': 0.458211} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.836676] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Destroyed the VM [ 1418.836909] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1418.837169] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-851926b9-cb88-494a-b18a-28ef9da68fab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.842856] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1418.842856] env[70020]: value = "task-3619381" [ 1418.842856] env[70020]: _type = "Task" [ 1418.842856] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.850066] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619381, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.276655] env[70020]: INFO nova.compute.resource_tracker [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating resource usage from migration aaf89f96-36ef-4cf0-aa3b-56d58d42baac [ 1419.321402] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51288d7-d3be-49a0-8b87-a4d1208b407f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.328885] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2e875e-729a-4844-ba9d-c7627d1ae5db {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.361049] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7d3150-82ea-4a37-b814-6706863a333d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.367882] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619381, 'name': RemoveSnapshot_Task, 'duration_secs': 0.413242} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.369951] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1419.370248] env[70020]: DEBUG nova.compute.manager [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1419.370959] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b3f777-78be-4487-93e9-20f9ba976c8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.374035] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942d9698-7548-4233-a439-4fb44e8b19e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.389781] env[70020]: DEBUG nova.compute.provider_tree [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1419.894468] env[70020]: DEBUG nova.scheduler.client.report [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1419.897474] env[70020]: INFO nova.compute.manager [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Shelve offloading [ 1420.400054] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.134s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.400298] env[70020]: INFO nova.compute.manager [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Migrating [ 1420.406886] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.409634] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be010bf6-f8d7-4a3c-bde1-e91058682b3b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.423048] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1420.423048] env[70020]: value = "task-3619382" [ 1420.423048] env[70020]: _type = "Task" [ 1420.423048] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.432053] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1420.432255] env[70020]: DEBUG nova.compute.manager [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1420.432965] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b27912-50d0-4296-aafd-524490ffd927 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.438829] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.438998] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.439183] env[70020]: DEBUG nova.network.neutron [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1420.916143] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.916405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.916517] env[70020]: DEBUG nova.network.neutron [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1421.153291] env[70020]: DEBUG nova.network.neutron [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.607685] env[70020]: DEBUG nova.network.neutron [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.655595] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1421.845419] env[70020]: DEBUG nova.compute.manager [req-48863920-ef05-4075-8ebb-264aede08f52 req-f65fce02-9b51-4a41-bbbb-d01402aaf420 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-vif-unplugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1421.845635] env[70020]: DEBUG oslo_concurrency.lockutils [req-48863920-ef05-4075-8ebb-264aede08f52 req-f65fce02-9b51-4a41-bbbb-d01402aaf420 service nova] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1421.845856] env[70020]: DEBUG oslo_concurrency.lockutils [req-48863920-ef05-4075-8ebb-264aede08f52 req-f65fce02-9b51-4a41-bbbb-d01402aaf420 service nova] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1421.846027] env[70020]: DEBUG oslo_concurrency.lockutils [req-48863920-ef05-4075-8ebb-264aede08f52 req-f65fce02-9b51-4a41-bbbb-d01402aaf420 service nova] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.846196] env[70020]: DEBUG nova.compute.manager [req-48863920-ef05-4075-8ebb-264aede08f52 req-f65fce02-9b51-4a41-bbbb-d01402aaf420 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] No waiting events found dispatching network-vif-unplugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1421.846361] env[70020]: WARNING nova.compute.manager [req-48863920-ef05-4075-8ebb-264aede08f52 req-f65fce02-9b51-4a41-bbbb-d01402aaf420 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received unexpected event network-vif-unplugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1421.934813] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1421.935790] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3056995d-4360-4faa-9872-d74a9a8bb18a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.943800] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1421.944055] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-416ecfec-f44a-4cf7-a36c-b14fd89e9dfd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.007421] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1422.007673] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1422.007858] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleting the datastore file [datastore1] 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1422.008172] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-415054ad-e39c-44f7-b7c7-a75163dc94b4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.014845] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1422.014845] env[70020]: value = "task-3619384" [ 1422.014845] env[70020]: _type = "Task" [ 1422.014845] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.022397] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619384, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.110703] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1422.523710] env[70020]: DEBUG oslo_vmware.api [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143577} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.523962] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1422.524139] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1422.524321] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1422.546852] env[70020]: INFO nova.scheduler.client.report [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted allocations for instance 3dc276f3-3f80-4732-a76d-560e6b057a56 [ 1423.051465] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1423.051819] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1423.052074] env[70020]: DEBUG nova.objects.instance [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'resources' on Instance uuid 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1423.555793] env[70020]: DEBUG nova.objects.instance [None 
req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'numa_topology' on Instance uuid 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1423.624783] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e3d603-e489-45d4-8ed3-c7015507a9af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.644315] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1423.869717] env[70020]: DEBUG nova.compute.manager [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1423.869943] env[70020]: DEBUG nova.compute.manager [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing instance network info cache due to event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1423.870178] env[70020]: DEBUG oslo_concurrency.lockutils [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.870320] env[70020]: DEBUG oslo_concurrency.lockutils [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1423.870476] env[70020]: DEBUG nova.network.neutron [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.058588] env[70020]: DEBUG nova.objects.base [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Object Instance<3dc276f3-3f80-4732-a76d-560e6b057a56> lazy-loaded attributes: resources,numa_topology {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1424.100039] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75bbe3d-01d1-466d-8f96-5df07cebe6d3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.108272] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b6a3a017-63ff-479d-a0ba-9b0d81cc0cd7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.137843] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed40e5b-e7a1-470f-9e1c-8eadde930b86 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.145336] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baa5f26-ef94-4d8d-bca9-ef9aaa4c9f8c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.150467] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1424.150706] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22e29694-b327-4f60-a038-d7698118ae12 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.162082] env[70020]: DEBUG nova.compute.provider_tree [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1424.164602] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1424.164602] env[70020]: value = "task-3619385" [ 1424.164602] env[70020]: _type = "Task" [ 1424.164602] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.173463] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1424.174239] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1424.559465] env[70020]: DEBUG nova.network.neutron [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updated VIF entry in instance network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1424.559822] env[70020]: DEBUG nova.network.neutron [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.665844] env[70020]: DEBUG nova.scheduler.client.report [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1424.679395] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1424.679603] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1424.679754] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a 
tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1424.679939] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1424.680091] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1424.680238] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1424.680435] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1424.680588] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1424.680745] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1424.680900] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1424.681078] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1424.686915] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6e6c581-fafc-48e4-8996-5b21ca6cedbd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.706445] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1424.706445] env[70020]: value = "task-3619386" [ 1424.706445] env[70020]: _type = "Task" [ 
1424.706445] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.714756] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619386, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.062106] env[70020]: DEBUG oslo_concurrency.lockutils [req-3ab2b31b-f6c3-4f23-9fb6-3df953a532b1 req-c642ae7e-c0c8-4157-9f82-7ce1357bc37a service nova] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.171457] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.119s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1425.201724] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1425.216757] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619386, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.680339] env[70020]: DEBUG oslo_concurrency.lockutils [None req-40f6ff35-48dd-4d4f-a35e-53afd4006fc2 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.080s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1425.681233] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.480s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1425.681413] env[70020]: INFO nova.compute.manager [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Unshelving [ 1425.716216] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619386, 'name': ReconfigVM_Task} progress is 99%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.217435] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619386, 'name': ReconfigVM_Task, 'duration_secs': 1.148378} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.218265] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1426.711023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.711023] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.711023] env[70020]: DEBUG nova.objects.instance [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'pci_requests' on Instance uuid 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1426.724350] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1426.724537] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1426.724688] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1426.724866] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1426.725015] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1426.725162] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1426.725359] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1426.725511] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1426.725670] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1426.725828] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1426.725996] env[70020]: DEBUG nova.virt.hardware [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1426.731254] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1426.731711] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e03d4d6-c77d-4a83-9df0-b0bf62cc3aeb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.749761] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1426.749761] env[70020]: value = "task-3619387" [ 1426.749761] env[70020]: _type = "Task" [ 1426.749761] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.757491] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619387, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.214528] env[70020]: DEBUG nova.objects.instance [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'numa_topology' on Instance uuid 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.259172] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619387, 'name': ReconfigVM_Task, 'duration_secs': 0.152269} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.259528] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1427.260217] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8663326b-61f4-4dc8-9c94-1bb5e6325054 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.281323] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208/422aa98b-fa01-42c5-90cf-ed70e9781208.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.281548] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4baac71e-9a85-4720-a13d-518e1788af8a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.298925] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1427.298925] env[70020]: value = "task-3619388" [ 1427.298925] env[70020]: _type = "Task" [ 1427.298925] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.305847] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619388, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.717315] env[70020]: INFO nova.compute.claims [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1427.808454] env[70020]: DEBUG oslo_vmware.api [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619388, 'name': ReconfigVM_Task, 'duration_secs': 0.254668} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.808669] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208/422aa98b-fa01-42c5-90cf-ed70e9781208.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.808954] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1428.315622] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b362d1d5-b8f4-4492-8775-f574259cf378 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.334934] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e102ec57-9f26-4c1d-8c73-cad0d1cd2ff1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.352367] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1428.773308] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a1070b-c39b-483a-ac6e-5d089f0af2e9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.780562] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5446a8ea-8f55-436a-9dcd-44120875430f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.811261] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1a5329-1592-40bc-90bf-aa93a247de29 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
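[annotation] The ReconfigVM_Task entries above (task-3619386 through task-3619388) all follow the same poll-until-done pattern: the vCenter call is invoked via oslo_vmware.service, then oslo_vmware.api wait_for_task/_poll_task logs "progress is N%" until the task reports "completed successfully" with a duration_secs. A minimal sketch of that polling loop is below, for illustration only; get_task_info, the state strings, and TaskFailed are hypothetical stand-ins, not the oslo.vmware API the driver actually uses (VMwareAPISession.wait_for_task, as the file/line references in the log show).

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll a backend task until it succeeds; raise on error or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)      # e.g. {'state': 'running', 'progress': 99}
            state = info.get("state")
            if state == "success":
                return info                    # caller can read duration_secs etc.
            if state == "error":
                raise TaskFailed(info.get("error", "task %s failed" % task_id))
            # log-line equivalent: "Task: {'id': ..., 'name': ReconfigVM_Task} progress is N%."
            time.sleep(poll_interval)
        raise TimeoutError("task %s did not complete within %ss" % (task_id, timeout))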
[ 1428.818288] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8bfcf1-ce0d-46ae-adc3-fd0cce09c10b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.831130] env[70020]: DEBUG nova.compute.provider_tree [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.889178] env[70020]: DEBUG nova.network.neutron [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Port c8b16039-3297-434c-ae9e-e75e6ee2965d binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1429.334348] env[70020]: DEBUG nova.scheduler.client.report [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1429.840049] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.129s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.867171] env[70020]: INFO nova.network.neutron [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1429.909225] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1429.909434] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.909604] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1430.943858] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.944082] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1430.944234] env[70020]: DEBUG nova.network.neutron [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1431.217977] env[70020]: DEBUG nova.compute.manager [req-76348406-d905-43f8-a1e8-26c2e927524d req-645474ca-9a39-4320-8e5c-f0f565bcc200 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-vif-plugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1431.218277] env[70020]: DEBUG oslo_concurrency.lockutils [req-76348406-d905-43f8-a1e8-26c2e927524d req-645474ca-9a39-4320-8e5c-f0f565bcc200 service nova] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.218418] env[70020]: DEBUG oslo_concurrency.lockutils [req-76348406-d905-43f8-a1e8-26c2e927524d req-645474ca-9a39-4320-8e5c-f0f565bcc200 service nova] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.218579] env[70020]: DEBUG oslo_concurrency.lockutils [req-76348406-d905-43f8-a1e8-26c2e927524d req-645474ca-9a39-4320-8e5c-f0f565bcc200 service nova] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.218735] env[70020]: DEBUG nova.compute.manager [req-76348406-d905-43f8-a1e8-26c2e927524d req-645474ca-9a39-4320-8e5c-f0f565bcc200 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] No waiting events found dispatching 
network-vif-plugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1431.218931] env[70020]: WARNING nova.compute.manager [req-76348406-d905-43f8-a1e8-26c2e927524d req-645474ca-9a39-4320-8e5c-f0f565bcc200 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received unexpected event network-vif-plugged-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 for instance with vm_state shelved_offloaded and task_state spawning. [ 1431.292806] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.293031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1431.293171] env[70020]: DEBUG nova.network.neutron [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1431.625881] env[70020]: DEBUG nova.network.neutron [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.123657] env[70020]: DEBUG nova.network.neutron [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 
3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.128631] env[70020]: DEBUG oslo_concurrency.lockutils [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1432.625984] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1432.651291] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0ea0c0-1cc0-4074-aa8f-ab661983a890 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.656045] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8948c7fc458cd8dfe8045f816a14f3f0',container_format='bare',created_at=2025-04-25T23:14:24Z,direct_url=,disk_format='vmdk',id=c586dd81-3de4-4742-bffd-494d74a05131,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1391159863-shelved',owner='11384e127368415d82f2e8a7e985b17e',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2025-04-25T23:14:40Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 1432.656269] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1432.656425] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1432.656604] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1432.656748] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1432.656902] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1432.657144] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1432.657306] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1432.657466] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1432.657621] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1432.657787] env[70020]: DEBUG nova.virt.hardware [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1432.658511] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2692774c-b5c8-4342-a45f-84bd461cc3b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.679409] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb77bc5-38ff-4fd2-852c-87f9b2a85007 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.682725] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51197969-6e2b-40ff-845f-c5a120387854 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.690969] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1432.701743] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:7c:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'af454577-0e89-41a3-a9f2-f39716f62fd5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6a9de80-b7ec-45e7-9bad-0e115b06a3b9', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1432.708856] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1432.709290] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1432.709487] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-602f444a-3b08-4085-b4f8-8a1da7da6609 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.726931] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.726931] env[70020]: value = "task-3619389" [ 1432.726931] env[70020]: _type = "Task" [ 1432.726931] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.735970] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619389, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.211481] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-a15eb036-a645-410d-9c4c-dcd931b8cf5a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance '422aa98b-fa01-42c5-90cf-ed70e9781208' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1433.237394] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619389, 'name': CreateVM_Task, 'duration_secs': 0.287284} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.237561] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.238229] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.238397] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1433.238752] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1433.239036] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c31e8e47-2b13-4a4b-a536-1c868beed339 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.242507] env[70020]: DEBUG nova.compute.manager [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1433.242678] env[70020]: DEBUG nova.compute.manager [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing instance network info cache due to event network-changed-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1433.242878] env[70020]: DEBUG oslo_concurrency.lockutils [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] Acquiring lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.243024] env[70020]: DEBUG oslo_concurrency.lockutils [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] Acquired lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1433.243186] env[70020]: DEBUG nova.network.neutron [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Refreshing network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1433.247133] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1433.247133] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]520507e6-03b4-04df-de95-2b908b9d2a02" [ 1433.247133] env[70020]: _type = "Task" [ 1433.247133] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.255854] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]520507e6-03b4-04df-de95-2b908b9d2a02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.757307] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1433.757501] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Processing image c586dd81-3de4-4742-bffd-494d74a05131 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1433.757735] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.757883] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1433.758070] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.758304] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e437e2e-b24d-46c6-ac28-1cc61e5b396f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.766269] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.766435] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1433.767112] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e85aa55-b2cb-48a6-b81e-62e5df085cbe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.772109] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1433.772109] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52ab60a7-c4bd-8dd0-107f-d85d1ba93053" [ 1433.772109] env[70020]: _type = "Task" [ 1433.772109] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.780940] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52ab60a7-c4bd-8dd0-107f-d85d1ba93053, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.932531] env[70020]: DEBUG nova.network.neutron [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updated VIF entry in instance network info cache for port d6a9de80-b7ec-45e7-9bad-0e115b06a3b9. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1433.932879] env[70020]: DEBUG nova.network.neutron [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [{"id": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "address": "fa:16:3e:52:7c:a2", "network": {"id": "ed5fa951-66b0-45af-8569-314b06003130", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1735933033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11384e127368415d82f2e8a7e985b17e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "af454577-0e89-41a3-a9f2-f39716f62fd5", "external-id": "nsx-vlan-transportzone-63", "segmentation_id": 63, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9de80-b7", "ovs_interfaceid": "d6a9de80-b7ec-45e7-9bad-0e115b06a3b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.281853] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 
tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1434.282251] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Fetch image to [datastore2] OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3/OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1434.282326] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Downloading stream optimized image c586dd81-3de4-4742-bffd-494d74a05131 to [datastore2] OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3/OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3.vmdk on the data store datastore2 as vApp {{(pid=70020) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1434.282461] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Downloading image file data c586dd81-3de4-4742-bffd-494d74a05131 to the ESX as VM named 'OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3' {{(pid=70020) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1434.351148] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1434.351148] env[70020]: value = "resgroup-9" [ 1434.351148] env[70020]: _type = "ResourcePool" [ 1434.351148] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1434.351419] env[70020]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0213700a-30c8-48ce-b1cb-ce424477bbf1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.370718] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease: (returnval){ [ 1434.370718] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5259c432-9cb1-d55e-0f40-fc1c3aee1c82" [ 1434.370718] env[70020]: _type = "HttpNfcLease" [ 1434.370718] env[70020]: } obtained for vApp import into resource pool (val){ [ 1434.370718] env[70020]: value = "resgroup-9" [ 1434.370718] env[70020]: _type = "ResourcePool" [ 1434.370718] env[70020]: }. 
{{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1434.371082] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the lease: (returnval){ [ 1434.371082] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5259c432-9cb1-d55e-0f40-fc1c3aee1c82" [ 1434.371082] env[70020]: _type = "HttpNfcLease" [ 1434.371082] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1434.378684] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1434.378684] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5259c432-9cb1-d55e-0f40-fc1c3aee1c82" [ 1434.378684] env[70020]: _type = "HttpNfcLease" [ 1434.378684] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1434.436013] env[70020]: DEBUG oslo_concurrency.lockutils [req-ad09bf71-e05c-45bb-8768-f7888635b58e req-b79822bd-7d95-4674-8249-9aa8f78eeeaf service nova] Releasing lock "refresh_cache-3dc276f3-3f80-4732-a76d-560e6b057a56" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1434.879790] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1434.879790] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5259c432-9cb1-d55e-0f40-fc1c3aee1c82" [ 1434.879790] env[70020]: _type = "HttpNfcLease" [ 1434.879790] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1434.880099] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1434.880099] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5259c432-9cb1-d55e-0f40-fc1c3aee1c82" [ 1434.880099] env[70020]: _type = "HttpNfcLease" [ 1434.880099] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1434.880807] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96f0d21-560d-41cd-9f3e-279386fd497b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.887844] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a9e846-3819-c14f-1864-e16d46f3da9d/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1434.888034] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a9e846-3819-c14f-1864-e16d46f3da9d/disk-0.vmdk. 
{{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1434.951668] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-de5cd1c6-f80d-4e6e-a6f6-38d6c02e94af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.987950] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1435.988363] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a9e846-3819-c14f-1864-e16d46f3da9d/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1435.989415] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecbdd28-cbdd-4e90-ac7d-af2a4d26e153 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.996750] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a9e846-3819-c14f-1864-e16d46f3da9d/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1435.996911] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a9e846-3819-c14f-1864-e16d46f3da9d/disk-0.vmdk. 
{{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1435.997154] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1e950830-33ff-47dc-a015-f54a26a4ea97 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.998870] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.999217] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.999431] env[70020]: DEBUG nova.compute.manager [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Going to confirm migration 9 {{(pid=70020) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1436.210379] env[70020]: DEBUG oslo_vmware.rw_handles [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a9e846-3819-c14f-1864-e16d46f3da9d/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1436.210679] env[70020]: INFO nova.virt.vmwareapi.images [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Downloaded image file data c586dd81-3de4-4742-bffd-494d74a05131 [ 1436.211533] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69fa4017-77ae-4d8e-9774-94edf3361768 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.226742] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7685f8ed-6d18-420e-bb2f-af6d08d335b0 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.256785] env[70020]: INFO nova.virt.vmwareapi.images [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] The imported VM was unregistered [ 1436.259128] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1436.259357] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1436.259613] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ad18b35-ba37-41d6-872d-436e10e01f7b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.269423] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Created directory with path [datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1436.269595] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3/OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3.vmdk to [datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk. 
{{(pid=70020) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1436.269813] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-536907a5-5469-4fbf-92fc-a83d88863e28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.275144] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1436.275144] env[70020]: value = "task-3619392" [ 1436.275144] env[70020]: _type = "Task" [ 1436.275144] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.282431] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619392, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.576059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.576324] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1436.576546] env[70020]: DEBUG nova.network.neutron [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.576768] env[70020]: DEBUG nova.objects.instance [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'info_cache' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1436.784778] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619392, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.286228] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619392, 'name': MoveVirtualDisk_Task} progress is 49%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.788654] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619392, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.869241] env[70020]: DEBUG nova.network.neutron [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.287317] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619392, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.372472] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1438.372851] env[70020]: DEBUG nova.objects.instance [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'migration_context' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1438.787041] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619392, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.328484} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.787309] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3/OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3.vmdk to [datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk. [ 1438.787491] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Cleaning up location [datastore2] OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1438.787651] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_2b19b122-fd29-4214-8917-fc1bed9a00e3 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1438.787960] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb899a5b-4635-4e91-984f-4a16f3804e60 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.794440] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1438.794440] env[70020]: value = "task-3619393" [ 1438.794440] env[70020]: _type = "Task" [ 1438.794440] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.801493] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.875686] env[70020]: DEBUG nova.objects.base [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Object Instance<422aa98b-fa01-42c5-90cf-ed70e9781208> lazy-loaded attributes: info_cache,migration_context {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1438.876619] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b758025f-c151-480e-a77f-8fb11d302f93 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.895842] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d28fe53e-df0e-49f1-ba7d-4404b6d16106 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.900729] env[70020]: DEBUG oslo_vmware.api [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1438.900729] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52e215bb-700a-e662-f9da-eca6afbcc831" [ 1438.900729] env[70020]: _type = "Task" [ 1438.900729] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.908064] env[70020]: DEBUG oslo_vmware.api [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e215bb-700a-e662-f9da-eca6afbcc831, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.304626] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067944} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.304984] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1439.305082] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1439.305347] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk to [datastore2] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1439.305597] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63a2e842-11cd-4c9b-8688-36cc020290b8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.312789] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1439.312789] env[70020]: value = "task-3619394" [ 1439.312789] env[70020]: _type = "Task" [ 1439.312789] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.319936] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.410130] env[70020]: DEBUG oslo_vmware.api [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52e215bb-700a-e662-f9da-eca6afbcc831, 'name': SearchDatastore_Task, 'duration_secs': 0.007199} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.410781] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1439.411029] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1439.822837] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619394, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.974412] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2b6c5c-71fb-4a58-9a87-38e3cb341ccd {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.982868] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d865b5-9d62-4e25-bbc1-e63a6d03a7fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.014064] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150c9442-8976-4422-ac4c-2b792e36a7d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.021777] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c06cf63-310d-4e5e-99a0-d783b5abc729 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.035435] env[70020]: DEBUG nova.compute.provider_tree [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1440.323016] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619394, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.538779] env[70020]: DEBUG nova.scheduler.client.report [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1440.823165] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619394, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.324864] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619394, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.550528] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.139s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1441.550741] env[70020]: DEBUG nova.compute.manager [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=70020) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1441.825572] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619394, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.16643} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.825831] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c586dd81-3de4-4742-bffd-494d74a05131/c586dd81-3de4-4742-bffd-494d74a05131.vmdk to [datastore2] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.826623] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43911805-8e5d-45ec-9df7-ef3720fda2cc {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.521638] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1442.523861] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12d62a6b-4303-4d74-8509-d5eaa557454e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.544201] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1442.544201] env[70020]: value = "task-3619395" [ 1442.544201] env[70020]: _type = "Task" [ 1442.544201] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.554569] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619395, 'name': ReconfigVM_Task} progress is 6%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.587680] env[70020]: INFO nova.scheduler.client.report [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted allocation for migration aaf89f96-36ef-4cf0-aa3b-56d58d42baac [ 1443.053793] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619395, 'name': ReconfigVM_Task, 'duration_secs': 0.284619} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.054408] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 3dc276f3-3f80-4732-a76d-560e6b057a56/3dc276f3-3f80-4732-a76d-560e6b057a56.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1443.056063] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98f9cf42-319c-4539-b854-445dd73416cf {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.060871] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1443.060871] env[70020]: value = "task-3619396" [ 1443.060871] env[70020]: _type = "Task" [ 1443.060871] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.067935] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619396, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.093172] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d08c1e4a-7db4-4239-971d-93cc24e0e254 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.094s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.144493] env[70020]: DEBUG nova.objects.instance [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'flavor' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1443.571053] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619396, 'name': Rename_Task, 'duration_secs': 0.149971} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.571356] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.571501] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19b09673-64ad-49b7-bba3-ee09a23c1639 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.577264] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1443.577264] env[70020]: value = "task-3619397" [ 1443.577264] env[70020]: _type = "Task" [ 1443.577264] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.584013] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619397, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.649614] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.649814] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1443.649940] env[70020]: DEBUG nova.network.neutron [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1443.650145] env[70020]: DEBUG nova.objects.instance [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'info_cache' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.087129] env[70020]: DEBUG oslo_vmware.api [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619397, 'name': PowerOnVM_Task, 'duration_secs': 0.463574} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.087343] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.134968] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.153589] env[70020]: DEBUG nova.objects.base [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Object Instance<422aa98b-fa01-42c5-90cf-ed70e9781208> lazy-loaded attributes: flavor,info_cache {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1444.196727] env[70020]: DEBUG nova.compute.manager [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1444.198084] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d11e3ce-1862-4448-84e3-6a23a9efc9c6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.639238] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.639596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.639908] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1444.640186] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1444.641990] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffde93fa-b1e1-4b84-83de-6f40ce64b7d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.653581] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fee793-b3db-4cad-91fc-80f702e9a920 
{{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.674070] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e78391c-e94d-43b7-b377-6e75ea5c6933 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.680673] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006aefe3-c450-415e-a319-5193b98164d5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.708793] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180430MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1444.708976] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.709151] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.721031] env[70020]: DEBUG oslo_concurrency.lockutils [None req-bf494406-3636-4def-bc15-3fe2f7fe5966 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.040s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1444.972504] env[70020]: DEBUG nova.network.neutron [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [{"id": "c8b16039-3297-434c-ae9e-e75e6ee2965d", "address": "fa:16:3e:e4:c2:e7", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b16039-32", "ovs_interfaceid": 
"c8b16039-3297-434c-ae9e-e75e6ee2965d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.475780] env[70020]: DEBUG oslo_concurrency.lockutils [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-422aa98b-fa01-42c5-90cf-ed70e9781208" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1445.737670] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 422aa98b-fa01-42c5-90cf-ed70e9781208 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.737949] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 3dc276f3-3f80-4732-a76d-560e6b057a56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.737992] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1445.738138] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1445.771871] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03cfc41-92f5-403b-99a5-88199969d5f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.779313] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10168616-d281-4eb3-a323-02740c40a963 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.809538] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1559bf47-c898-4cad-8f9c-654f20b6ed7f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.816702] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b589b8-37eb-4b2d-8a71-a2c313f35a75 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.829508] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.333050] 
env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1446.481980] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1446.482305] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46417da2-90ae-4b57-8c7d-7ba9048477ea {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.489887] env[70020]: DEBUG oslo_vmware.api [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1446.489887] env[70020]: value = "task-3619398" [ 1446.489887] env[70020]: _type = "Task" [ 1446.489887] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.497539] env[70020]: DEBUG oslo_vmware.api [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.838215] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1446.838609] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.129s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.999751] env[70020]: DEBUG oslo_vmware.api [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619398, 'name': PowerOnVM_Task, 'duration_secs': 0.382364} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.000023] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1447.000228] env[70020]: DEBUG nova.compute.manager [None req-e6e83deb-d764-4c36-8efb-c579d484e07a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1447.000974] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d6599a-5a88-49b4-bd5e-7cb190d94936 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.774199] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.774887] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.774887] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.774887] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.775188] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.777352] env[70020]: INFO nova.compute.manager [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Terminating instance [ 1448.280740] env[70020]: DEBUG nova.compute.manager [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1448.281177] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1448.281919] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63e0893-a2df-4f19-88b8-d74d83ea9f2a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.290054] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.290301] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b138579-d325-4cc3-a046-4d2a907377cb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.296435] env[70020]: DEBUG oslo_vmware.api [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1448.296435] env[70020]: value = "task-3619399" [ 1448.296435] env[70020]: _type = "Task" [ 1448.296435] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.304316] env[70020]: DEBUG oslo_vmware.api [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619399, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.807112] env[70020]: DEBUG oslo_vmware.api [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619399, 'name': PowerOffVM_Task, 'duration_secs': 0.188411} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.807400] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1448.807676] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1448.807807] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4584904d-e4a2-46b7-97f2-67cb34e9807f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.833235] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.833438] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.833600] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.833747] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.833923] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.834079] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1448.868019] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1448.868266] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1448.868420] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleting the datastore file [datastore1] 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1448.868675] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07df5296-3974-45d8-848a-0f051ed0f60c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.875655] env[70020]: DEBUG oslo_vmware.api [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1448.875655] env[70020]: value = "task-3619401" [ 1448.875655] env[70020]: _type = "Task" [ 1448.875655] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.882884] env[70020]: DEBUG oslo_vmware.api [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.135849] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1449.386237] env[70020]: DEBUG oslo_vmware.api [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165212} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.386555] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1449.386597] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1449.386764] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1449.386931] env[70020]: INFO nova.compute.manager [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1449.387325] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1449.387535] env[70020]: DEBUG nova.compute.manager [-] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1449.387629] env[70020]: DEBUG nova.network.neutron [-] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1449.821112] env[70020]: DEBUG nova.compute.manager [req-55db87ca-acfb-4ee2-afc2-f84275c4060d req-2d67874b-d7f7-46e2-82af-b04af33ae405 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Received event network-vif-deleted-c8b16039-3297-434c-ae9e-e75e6ee2965d {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1449.821335] env[70020]: INFO nova.compute.manager [req-55db87ca-acfb-4ee2-afc2-f84275c4060d req-2d67874b-d7f7-46e2-82af-b04af33ae405 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Neutron deleted interface c8b16039-3297-434c-ae9e-e75e6ee2965d; detaching it from the instance and deleting it from the info cache [ 1449.821538] env[70020]: DEBUG nova.network.neutron [req-55db87ca-acfb-4ee2-afc2-f84275c4060d req-2d67874b-d7f7-46e2-82af-b04af33ae405 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.306035] env[70020]: DEBUG nova.network.neutron [-] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.324413] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ac56e8a-a15f-4841-9485-b622182f603c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.335103] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65981722-2397-4e8a-b4a1-1fee4ff7595e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.360986] env[70020]: DEBUG nova.compute.manager [req-55db87ca-acfb-4ee2-afc2-f84275c4060d req-2d67874b-d7f7-46e2-82af-b04af33ae405 service nova] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Detach interface failed, port_id=c8b16039-3297-434c-ae9e-e75e6ee2965d, reason: Instance 422aa98b-fa01-42c5-90cf-ed70e9781208 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1450.808974] env[70020]: INFO nova.compute.manager [-] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Took 1.42 seconds to deallocate network for instance. 
[ 1451.315947] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1451.316250] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1451.316468] env[70020]: DEBUG nova.objects.instance [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'resources' on Instance uuid 422aa98b-fa01-42c5-90cf-ed70e9781208 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1451.894276] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0a04e3-139f-46bc-b913-63642df51c71 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.902046] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f07dd0-3dbe-490e-91d7-11776cd0baa1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.930992] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325f08c9-43ed-40d7-8d86-d22b2fc1e7a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.938297] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dbbbab-4c31-4456-9d1e-8d971f6b8ead {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.951150] env[70020]: DEBUG nova.compute.provider_tree [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1452.454152] env[70020]: DEBUG nova.scheduler.client.report [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1452.959980] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 
tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.644s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1452.980290] env[70020]: INFO nova.scheduler.client.report [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted allocations for instance 422aa98b-fa01-42c5-90cf-ed70e9781208 [ 1453.135348] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.135560] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Cleaning up deleted instances {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1453.489074] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ba0045d5-645a-429a-9bb1-4195db35d933 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "422aa98b-fa01-42c5-90cf-ed70e9781208" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.714s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1453.643547] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] There are 25 instances to clean {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1453.643726] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 422aa98b-fa01-42c5-90cf-ed70e9781208] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.147203] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 4b73ae75-c403-4268-8eab-4d6c32aef950] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.650802] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 845ea37a-9945-49cd-a1bd-3da91f4af16b] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.812626] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.812864] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1455.153782] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: e96aae17-5ae5-404b-bbe3-46777f7c34d2] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.316069] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Starting instance... {{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1455.656689] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: d28f6dff-8f9f-41d4-87ae-0ff87327d042] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.835608] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1455.835876] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1455.837345] env[70020]: INFO nova.compute.claims [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1456.159686] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 602328f7-258a-44f5-802c-d580824beea0] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1456.663932] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 115a8a58-d3ce-4778-9bc7-c75d0007b499] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1456.880410] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6866fd02-6695-481b-ad74-86874ac783d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.887819] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dc55c4-12d7-4e34-84ca-8e2e8f952ef3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.916199] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97780a7-537e-447a-b64a-7265f30a8fd8 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.922883] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c66baa-3248-4c3e-a66e-c87fcc4d1fe4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.935269] env[70020]: DEBUG nova.compute.provider_tree [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.168032] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 399d55b7-2a79-4849-89b6-ff8d1c0d33e1] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1457.438342] env[70020]: DEBUG nova.scheduler.client.report [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1457.670947] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c9a3fb0f-95bf-4b51-ac06-99415acfa9cb] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1457.942830] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.107s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1457.943520] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Start building networks asynchronously for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1458.173969] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c29d577e-9498-40b1-8e49-caff821cb80a] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1458.448378] env[70020]: DEBUG nova.compute.utils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1458.449809] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1458.449980] env[70020]: DEBUG nova.network.neutron [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1458.502260] env[70020]: DEBUG nova.policy [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274a4150c13f4ec0b34194f12b995f25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b34ec8c1ad864be694a6f9ce2b8a7788', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1458.679341] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 1b25f8db-457e-4948-b9da-35e2fa5b897e] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1458.750509] env[70020]: DEBUG nova.network.neutron [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Successfully created port: 2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1458.953089] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1459.182605] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ca63297c-b7bc-45e9-8850-f46050905c26] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1459.686194] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 85da90b5-c3cc-4e35-8c86-6aca07992a09] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1459.962575] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1459.991150] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1459.991423] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1459.991578] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1459.991755] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1459.991898] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1459.992052] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1459.992272] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1459.992457] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1459.992629] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1459.992786] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1459.992954] env[70020]: DEBUG nova.virt.hardware [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1459.993825] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3ea780-145e-4eda-9573-ee36a6adc0ff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.001710] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa4a371-b9c8-48c4-bd92-6338de1f514d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.093962] env[70020]: DEBUG nova.compute.manager [req-551fa31a-0895-448a-9c2e-612b3c76cef3 req-129633f5-82ff-4922-9ece-2d89a7797355 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Received event network-vif-plugged-2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1460.094212] env[70020]: DEBUG oslo_concurrency.lockutils [req-551fa31a-0895-448a-9c2e-612b3c76cef3 req-129633f5-82ff-4922-9ece-2d89a7797355 service nova] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.094483] env[70020]: DEBUG oslo_concurrency.lockutils [req-551fa31a-0895-448a-9c2e-612b3c76cef3 req-129633f5-82ff-4922-9ece-2d89a7797355 service nova] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.094594] env[70020]: DEBUG oslo_concurrency.lockutils [req-551fa31a-0895-448a-9c2e-612b3c76cef3 req-129633f5-82ff-4922-9ece-2d89a7797355 service nova] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.094779] env[70020]: DEBUG nova.compute.manager [req-551fa31a-0895-448a-9c2e-612b3c76cef3 req-129633f5-82ff-4922-9ece-2d89a7797355 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] No waiting events found dispatching network-vif-plugged-2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1460.094907] env[70020]: WARNING nova.compute.manager [req-551fa31a-0895-448a-9c2e-612b3c76cef3 req-129633f5-82ff-4922-9ece-2d89a7797355 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Received unexpected event network-vif-plugged-2c631bbd-dc11-42e5-9974-ae8b9992a1a5 for instance with vm_state building and task_state spawning. [ 1460.171471] env[70020]: DEBUG nova.network.neutron [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Successfully updated port: 2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.189751] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: cc46e905-958e-4dc3-8f83-f8b5680f94de] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1460.675271] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.675428] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1460.675610] env[70020]: DEBUG nova.network.neutron [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1460.693186] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 9962b718-ca31-4f09-91f3-133dd68612ad] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1461.196100] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 899183eb-ba25-491f-b981-77a33239ed74] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1461.206439] env[70020]: DEBUG nova.network.neutron [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Instance cache missing network info. {{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1461.322303] env[70020]: DEBUG nova.network.neutron [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.699167] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: c0a78ace-307e-4156-beb3-a53061acff7f] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1461.825145] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1461.825483] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Instance network_info: |[{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1461.825897] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:c1:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c631bbd-dc11-42e5-9974-ae8b9992a1a5', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1461.833328] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1461.833528] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1461.833740] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-094f9c13-22b1-472e-9f74-4d9564af7c17 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.854778] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1461.854778] env[70020]: value = "task-3619402" [ 1461.854778] env[70020]: _type = "Task" [ 1461.854778] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.865773] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619402, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.120075] env[70020]: DEBUG nova.compute.manager [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Received event network-changed-2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1462.120285] env[70020]: DEBUG nova.compute.manager [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Refreshing instance network info cache due to event network-changed-2c631bbd-dc11-42e5-9974-ae8b9992a1a5. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1462.120502] env[70020]: DEBUG oslo_concurrency.lockutils [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.120642] env[70020]: DEBUG oslo_concurrency.lockutils [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1462.120801] env[70020]: DEBUG nova.network.neutron [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Refreshing network info cache for port 2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1462.202692] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: ce4796b0-4ad2-4468-9898-aaedce6dcd32] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1462.363975] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619402, 'name': CreateVM_Task, 'duration_secs': 0.304814} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.364161] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1462.364794] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.364963] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1462.365292] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1462.365537] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7678207-823a-4cc8-949f-06996ea873f7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.370168] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1462.370168] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52fec822-dba4-bf41-430d-8d2093021903" [ 1462.370168] env[70020]: _type = "Task" [ 1462.370168] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.377237] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fec822-dba4-bf41-430d-8d2093021903, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.705573] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: f9d4837f-0e3f-4a83-9055-04d17ef3eb23] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1462.792452] env[70020]: DEBUG nova.network.neutron [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updated VIF entry in instance network info cache for port 2c631bbd-dc11-42e5-9974-ae8b9992a1a5. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1462.792792] env[70020]: DEBUG nova.network.neutron [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.880454] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52fec822-dba4-bf41-430d-8d2093021903, 'name': SearchDatastore_Task, 'duration_secs': 0.009569} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.880759] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1462.880987] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.881229] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.881378] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1462.881550] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.881800] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-428c89fd-1825-4396-8617-6e524eca4f53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.889975] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.890158] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1462.890836] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e77068e1-86d8-487c-9aaf-e9c73a3d5683 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.895327] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1462.895327] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5294160f-1dc4-03cb-90f2-f9fdf4423649" [ 1462.895327] env[70020]: _type = "Task" [ 1462.895327] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.901993] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5294160f-1dc4-03cb-90f2-f9fdf4423649, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.208465] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 9e7bd10b-3a78-48d8-9b66-e3646635be6d] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1463.294870] env[70020]: DEBUG oslo_concurrency.lockutils [req-8edd6b09-e334-4203-ad56-e4d04b109d9f req-b6478415-d666-4bcf-b093-b6c2f249bcd7 service nova] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.406249] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5294160f-1dc4-03cb-90f2-f9fdf4423649, 'name': SearchDatastore_Task, 'duration_secs': 0.008164} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.406974] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43153635-ff56-4d5a-b725-9412673989c4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.411680] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1463.411680] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]5207dcca-2a24-2d7d-3577-0d29537fea6f" [ 1463.411680] env[70020]: _type = "Task" [ 1463.411680] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.418534] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5207dcca-2a24-2d7d-3577-0d29537fea6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.712019] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 8dbb1de0-38de-493f-9512-b8754bab7bcb] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1463.922426] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]5207dcca-2a24-2d7d-3577-0d29537fea6f, 'name': SearchDatastore_Task, 'duration_secs': 0.009066} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.922673] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.922923] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.923185] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f300bedc-c9e5-451e-9151-978fe679b64d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.930690] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1463.930690] env[70020]: value = "task-3619403" [ 1463.930690] env[70020]: _type = "Task" [ 1463.930690] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.937850] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.215181] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 5b69d3b2-c236-45f9-b35b-a9992b9c1c79] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1464.440028] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421472} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.440288] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1464.440497] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1464.440743] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d17ea34-8111-483d-8566-8d14bdf5fb2e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.446948] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1464.446948] env[70020]: value = "task-3619404" [ 1464.446948] env[70020]: _type = "Task" [ 1464.446948] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.454050] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.719477] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: b82378e5-7fd2-4a1c-8fbc-31fe9ed28b4d] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1464.956642] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07714} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.956899] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1464.957654] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce12acf-44fb-40fa-a53e-b7e9af9f985e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.980580] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.980827] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43f8deba-a19d-4229-af7a-a665909d3890 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.999836] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1464.999836] env[70020]: value = "task-3619405" [ 1464.999836] env[70020]: _type = "Task" [ 1464.999836] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.007340] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.223390] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 96966bf2-a9ff-48ba-be3f-c767e7b6eedd] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1465.509721] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619405, 'name': ReconfigVM_Task, 'duration_secs': 0.292551} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.509988] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.510626] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08ceef38-9267-482c-b6bd-e1d536de03e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.517248] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1465.517248] env[70020]: value = "task-3619406" [ 1465.517248] env[70020]: _type = "Task" [ 1465.517248] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.525627] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619406, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.726573] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] [instance: 13f6daa5-d859-40ed-b1b0-edd7717b8df3] Instance has had 0 of 5 cleanup attempts {{(pid=70020) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1466.027432] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619406, 'name': Rename_Task, 'duration_secs': 0.148139} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.027638] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.027860] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f94c8f6-f02a-4edb-b869-50ba3bd44b01 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.033527] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1466.033527] env[70020]: value = "task-3619407" [ 1466.033527] env[70020]: _type = "Task" [ 1466.033527] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.040589] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619407, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.230049] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.230401] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Cleaning up deleted instances with incomplete migration {{(pid=70020) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1466.543204] env[70020]: DEBUG oslo_vmware.api [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619407, 'name': PowerOnVM_Task, 'duration_secs': 0.444271} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.543455] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1466.543652] env[70020]: INFO nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Took 6.58 seconds to spawn the instance on the hypervisor. [ 1466.543825] env[70020]: DEBUG nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1466.544553] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0663e74c-4d3e-4fe3-b119-f79e535a6b0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.733144] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1467.061817] env[70020]: INFO nova.compute.manager [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Took 11.24 seconds to build instance. 
[ 1467.563408] env[70020]: DEBUG oslo_concurrency.lockutils [None req-5a5bf1e2-34d5-43d0-bd4f-4f98f3d582c9 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.750s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1467.746752] env[70020]: DEBUG nova.compute.manager [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Received event network-changed-2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1467.746921] env[70020]: DEBUG nova.compute.manager [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Refreshing instance network info cache due to event network-changed-2c631bbd-dc11-42e5-9974-ae8b9992a1a5. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1467.747113] env[70020]: DEBUG oslo_concurrency.lockutils [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.747257] env[70020]: DEBUG oslo_concurrency.lockutils [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1467.747411] env[70020]: DEBUG nova.network.neutron [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Refreshing network info cache for port 2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1468.442179] env[70020]: DEBUG nova.network.neutron [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updated VIF entry in instance network info cache for port 2c631bbd-dc11-42e5-9974-ae8b9992a1a5. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1468.442532] env[70020]: DEBUG nova.network.neutron [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.945776] env[70020]: DEBUG oslo_concurrency.lockutils [req-92654ff1-664d-4147-9b78-4d775a08caba req-191ecc94-0329-4bdb-9de5-6c9a76bc2ecc service nova] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1469.235387] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1480.817681] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1480.818212] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.818212] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1480.819675] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.819875] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1480.821986] env[70020]: INFO nova.compute.manager [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Terminating instance [ 1481.326091] env[70020]: DEBUG nova.compute.manager [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1481.326300] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1481.327586] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d485f2c-959c-4616-bbfb-18ede25f5ed5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.335072] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1481.335299] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-548567bc-9440-4c82-b1b4-45bfeb82d9d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.340809] env[70020]: DEBUG oslo_vmware.api [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1481.340809] env[70020]: value = "task-3619408" [ 1481.340809] env[70020]: _type = "Task" [ 1481.340809] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.348548] env[70020]: DEBUG oslo_vmware.api [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.850460] env[70020]: DEBUG oslo_vmware.api [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619408, 'name': PowerOffVM_Task, 'duration_secs': 0.227304} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.850870] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1481.850912] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1481.851145] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c4a20c0-beb5-4a8f-9d55-69d5b404b538 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.918803] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1481.919060] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deleting contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1481.919227] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleting the datastore file [datastore2] 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1481.919508] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-799b8901-a135-4d02-960f-b83b4070278b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.925222] env[70020]: DEBUG oslo_vmware.api [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for the task: (returnval){ [ 1481.925222] 
env[70020]: value = "task-3619410" [ 1481.925222] env[70020]: _type = "Task" [ 1481.925222] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.933270] env[70020]: DEBUG oslo_vmware.api [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.436051] env[70020]: DEBUG oslo_vmware.api [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Task: {'id': task-3619410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124278} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.436051] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1482.436051] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deleted contents of the VM from datastore datastore2 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1482.436051] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1482.436051] env[70020]: INFO nova.compute.manager [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1482.436051] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1482.436051] env[70020]: DEBUG nova.compute.manager [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1482.436051] env[70020]: DEBUG nova.network.neutron [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1482.882789] env[70020]: DEBUG nova.compute.manager [req-2d8b4c8a-03b8-42d8-bf9d-ecf02b517509 req-bdd8c05b-ec96-4a34-b274-0a6eb25e0361 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Received event network-vif-deleted-d6a9de80-b7ec-45e7-9bad-0e115b06a3b9 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1482.882789] env[70020]: INFO nova.compute.manager [req-2d8b4c8a-03b8-42d8-bf9d-ecf02b517509 req-bdd8c05b-ec96-4a34-b274-0a6eb25e0361 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Neutron deleted interface d6a9de80-b7ec-45e7-9bad-0e115b06a3b9; detaching it from the instance and deleting it from the info cache [ 1482.882789] env[70020]: DEBUG nova.network.neutron [req-2d8b4c8a-03b8-42d8-bf9d-ecf02b517509 req-bdd8c05b-ec96-4a34-b274-0a6eb25e0361 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.365395] env[70020]: DEBUG nova.network.neutron [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.385042] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0adce05b-975a-4602-8131-e3fce7e99eff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.395884] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a8e34d-4b05-432e-b8e6-18824b7fc45d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.422865] env[70020]: DEBUG nova.compute.manager [req-2d8b4c8a-03b8-42d8-bf9d-ecf02b517509 req-bdd8c05b-ec96-4a34-b274-0a6eb25e0361 service nova] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Detach interface failed, port_id=d6a9de80-b7ec-45e7-9bad-0e115b06a3b9, reason: Instance 3dc276f3-3f80-4732-a76d-560e6b057a56 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1483.868232] env[70020]: INFO nova.compute.manager [-] [instance: 3dc276f3-3f80-4732-a76d-560e6b057a56] Took 1.43 seconds to deallocate network for instance. 
[ 1484.374799] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1484.375225] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1484.375317] env[70020]: DEBUG nova.objects.instance [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lazy-loading 'resources' on Instance uuid 3dc276f3-3f80-4732-a76d-560e6b057a56 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.918246] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9bee18-cbed-4f50-ad06-5e0d49d9256e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.925771] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565c608d-5c7a-424d-9827-47ce23ee45df {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.955420] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665bc7e3-ab3e-4697-b823-61c5d528c51c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.962022] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e1c61e-9ab1-4fcf-8db8-bb8a4f3de0e5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.974607] env[70020]: DEBUG nova.compute.provider_tree [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.477822] env[70020]: DEBUG nova.scheduler.client.report [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1485.984112] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1486.004149] env[70020]: INFO nova.scheduler.client.report [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Deleted allocations for instance 3dc276f3-3f80-4732-a76d-560e6b057a56 [ 1486.512733] env[70020]: DEBUG oslo_concurrency.lockutils [None req-41e4fdc7-3d22-474c-80a1-d081b9e386c4 tempest-AttachVolumeShelveTestJSON-1114130730 tempest-AttachVolumeShelveTestJSON-1114130730-project-member] Lock "3dc276f3-3f80-4732-a76d-560e6b057a56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.695s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1504.134745] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.638840] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1504.639068] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1504.639240] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1504.639399] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1504.640383] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba5f5a5-6a83-4bc9-a360-c3044a54dad4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.649321] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed97aa0-e815-48a5-aaa5-1d1d8ae7310b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.663703] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a322a7e5-6c3f-47df-bb12-099ff6a3af39 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.670584] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2854c43e-c95f-4a89-8dfa-4bd7411ff2a1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.699455] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180913MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1504.699637] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1504.699857] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1505.582602] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1505.583328] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1505.727833] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance 99ca95d2-0fd4-49ee-9f91-389420c7a4ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1505.727940] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1505.728135] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1505.753055] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb90149-ff67-4baf-9344-e0be59699d53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.761129] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31071e1-b90d-4119-82f3-6ada050f30a6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.790097] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6025e9c-ce1d-4005-a3d2-791b98f655c5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.796769] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed54a3a1-f079-41af-a2c7-09c81ef3b715 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.809454] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.086672] env[70020]: DEBUG nova.compute.utils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1506.312532] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1506.590332] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=70020) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.817378] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1506.817559] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.118s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.651147] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.651467] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.651627] env[70020]: INFO nova.compute.manager [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Attaching volume 086505f8-f56d-40ea-935a-06f022d2348c to /dev/sdb [ 1507.704321] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36e8788-7e2c-4cfb-b4fc-b29b308f0103 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.711740] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e77002-e70a-4ca0-b49d-30ae8a7bad00 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.725187] env[70020]: DEBUG nova.virt.block_device [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating existing volume attachment record: 5cef8197-0829-41d0-88fe-bec91f92fe1e {{(pid=70020) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1509.818500] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.818914] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.818914] env[70020]: DEBUG 
oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.818993] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.819137] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.819276] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1510.135479] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.267402] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Volume attach. Driver type: vmdk {{(pid=70020) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1512.267678] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721863', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'name': 'volume-086505f8-f56d-40ea-935a-06f022d2348c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '99ca95d2-0fd4-49ee-9f91-389420c7a4ac', 'attached_at': '', 'detached_at': '', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'serial': '086505f8-f56d-40ea-935a-06f022d2348c'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1512.268610] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a374b88d-97a5-44a2-969e-bb81d71e172f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.284826] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3496fb65-a6f8-4164-82e0-c819bd1e4026 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.308574] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 
volume-086505f8-f56d-40ea-935a-06f022d2348c/volume-086505f8-f56d-40ea-935a-06f022d2348c.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1512.308809] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-512bd25d-1dda-4b41-be75-3f3717a30aca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.325545] env[70020]: DEBUG oslo_vmware.api [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1512.325545] env[70020]: value = "task-3619416" [ 1512.325545] env[70020]: _type = "Task" [ 1512.325545] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.332779] env[70020]: DEBUG oslo_vmware.api [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619416, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.835205] env[70020]: DEBUG oslo_vmware.api [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619416, 'name': ReconfigVM_Task, 'duration_secs': 0.340869} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.835486] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to attach disk [datastore2] volume-086505f8-f56d-40ea-935a-06f022d2348c/volume-086505f8-f56d-40ea-935a-06f022d2348c.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1512.840007] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f2580ff-1396-4223-b4b8-c9bc5ea3d3eb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.855758] env[70020]: DEBUG oslo_vmware.api [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1512.855758] env[70020]: value = "task-3619417" [ 1512.855758] env[70020]: _type = "Task" [ 1512.855758] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.862929] env[70020]: DEBUG oslo_vmware.api [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619417, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.365540] env[70020]: DEBUG oslo_vmware.api [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619417, 'name': ReconfigVM_Task, 'duration_secs': 0.140217} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.365842] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721863', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'name': 'volume-086505f8-f56d-40ea-935a-06f022d2348c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '99ca95d2-0fd4-49ee-9f91-389420c7a4ac', 'attached_at': '', 'detached_at': '', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'serial': '086505f8-f56d-40ea-935a-06f022d2348c'} {{(pid=70020) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1514.399582] env[70020]: DEBUG nova.objects.instance [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'flavor' on Instance uuid 99ca95d2-0fd4-49ee-9f91-389420c7a4ac {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1514.905964] env[70020]: DEBUG oslo_concurrency.lockutils [None req-f6879ee7-e94e-4107-9ccb-38b29e056dd2 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.254s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1515.135342] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.790891] env[70020]: DEBUG nova.compute.manager [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Stashing vm_state: active {{(pid=70020) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1516.310971] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1516.311286] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" 
:: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.816438] env[70020]: INFO nova.compute.claims [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1517.323058] env[70020]: INFO nova.compute.resource_tracker [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating resource usage from migration c5e39bb8-b426-4c48-89db-2d674f105f1e [ 1517.361821] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a611748b-f21c-4120-8166-c5b8674d3593 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.369058] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fd8920-88b0-4d92-9de4-b2c8b9217791 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.398830] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98fa6c7-e475-41e1-b2f9-28b9a161f725 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.405368] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987aef6c-00e2-433b-a792-1a513339af4e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.417979] env[70020]: DEBUG nova.compute.provider_tree [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.921558] env[70020]: DEBUG nova.scheduler.client.report [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1518.426909] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.115s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1518.427161] env[70020]: INFO nova.compute.manager [None req-3b58034d-6b50-4045-a729-44273da4ebce 
tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Migrating [ 1518.941116] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.941509] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1518.941556] env[70020]: DEBUG nova.network.neutron [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1519.893825] env[70020]: DEBUG nova.network.neutron [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.130509] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.396612] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1521.912166] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85452e30-ce8b-4519-b8e2-0e98b4f29d1d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.933385] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 0 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1522.439618] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1522.439922] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-477f4770-2fae-4cc6-aab6-6c2e14f22b2f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.447160] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1522.447160] env[70020]: value = "task-3619418" [ 1522.447160] env[70020]: _type = "Task" [ 1522.447160] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.455038] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.957638] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619418, 'name': PowerOffVM_Task, 'duration_secs': 0.175605} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.958042] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1522.958170] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 17 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1523.464597] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1523.464863] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1523.465031] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1523.465223] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1523.465368] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1523.465513] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1523.465715] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1523.465869] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1523.466043] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1523.466208] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1523.466375] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1523.471274] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b98e6f8a-f057-4560-9983-5b2a4bc359ca {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.486678] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1523.486678] env[70020]: value = "task-3619419" [ 1523.486678] env[70020]: _type = "Task" [ 1523.486678] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.494549] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619419, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.996878] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619419, 'name': ReconfigVM_Task, 'duration_secs': 0.206208} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.997292] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 33 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1524.503897] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1524.504434] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.504434] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1524.504532] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.504660] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1524.504786] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1524.504985] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1524.505158] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce 
tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1524.505331] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1524.505496] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1524.505667] env[70020]: DEBUG nova.virt.hardware [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1524.510935] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1524.511259] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b73e6d8d-8f72-418b-b069-e9ddac2a3a7a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.529936] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1524.529936] env[70020]: value = "task-3619420" [ 1524.529936] env[70020]: _type = "Task" [ 1524.529936] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.537600] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619420, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.039780] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619420, 'name': ReconfigVM_Task, 'duration_secs': 0.26907} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.040159] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1525.040810] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76c1af2-71a5-43d8-b9a3-639ae4f58f41 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.065647] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1525.066199] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaafda5d-1360-4213-8891-70974b62025b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.083326] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1525.083326] env[70020]: value = "task-3619421" [ 1525.083326] env[70020]: _type = "Task" [ 1525.083326] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.090644] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619421, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.592786] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619421, 'name': ReconfigVM_Task, 'duration_secs': 0.287361} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.593072] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.593353] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 50 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1526.100591] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a0ac8d-f17a-400c-9588-b8eb6681930b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.122941] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcab20f-3e56-4843-af93-e938da14785c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.142243] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 67 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1527.789359] env[70020]: DEBUG nova.network.neutron [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Port 2c631bbd-dc11-42e5-9974-ae8b9992a1a5 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1528.811691] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1528.812122] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1528.812122] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1529.847708] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.847966] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1529.848103] env[70020]: DEBUG nova.network.neutron [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1530.546774] env[70020]: DEBUG nova.network.neutron [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.049704] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1531.558719] env[70020]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6537e1f3-1d41-4c49-b552-b71983ea0dce {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.565558] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd5afba-f23e-4e93-947a-21dc9c4055d2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.662088] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42756795-ee7a-4146-8d38-339db2f3177f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.683444] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d054e2-ff54-4d2d-87be-15ec30982bd7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.690103] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 83 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1533.196200] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.196523] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b095d57e-59e5-4b20-accb-7d0d41450a28 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.204206] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1533.204206] env[70020]: value = "task-3619422" [ 1533.204206] env[70020]: _type = "Task" [ 1533.204206] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.211651] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.715061] env[70020]: DEBUG oslo_vmware.api [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619422, 'name': PowerOnVM_Task, 'duration_secs': 0.37829} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.715457] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.715502] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3b58034d-6b50-4045-a729-44273da4ebce tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance '99ca95d2-0fd4-49ee-9f91-389420c7a4ac' progress to 100 {{(pid=70020) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1536.224883] env[70020]: DEBUG nova.network.neutron [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Port 2c631bbd-dc11-42e5-9974-ae8b9992a1a5 binding to destination host cpu-1 is already ACTIVE {{(pid=70020) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1536.225225] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.225389] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1536.225599] env[70020]: DEBUG nova.network.neutron [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1536.945208] env[70020]: DEBUG nova.network.neutron [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.448601] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1537.952428] env[70020]: DEBUG nova.compute.manager [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=70020) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1539.040653] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1539.040999] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1539.545418] env[70020]: DEBUG nova.objects.instance [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'migration_context' on Instance uuid 99ca95d2-0fd4-49ee-9f91-389420c7a4ac {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1540.065838] env[70020]: DEBUG nova.scheduler.client.report [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Refreshing inventories for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1540.079915] env[70020]: DEBUG nova.scheduler.client.report [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Updating ProviderTree inventory for provider ee72c483-d9d9-4e62-8f73-e9f24668500d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1540.080137] env[70020]: DEBUG nova.compute.provider_tree [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Updating inventory in ProviderTree for provider ee72c483-d9d9-4e62-8f73-e9f24668500d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1540.090507] env[70020]: DEBUG nova.scheduler.client.report [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Refreshing aggregate associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, aggregates: None {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1540.107572] env[70020]: DEBUG nova.scheduler.client.report [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Refreshing trait associations for resource provider ee72c483-d9d9-4e62-8f73-e9f24668500d, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=70020) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1540.143012] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8639cb4-1a03-42a4-95a4-92ca2d9e2b5d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.150578] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d44c89-712b-4243-916b-b0edc77e5ea2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.179734] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f574aa-5dc7-414a-8e33-390c026762b6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.186607] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdadce6-fad4-4736-b532-5d07306118ad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.199097] env[70020]: DEBUG nova.compute.provider_tree [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.702373] env[70020]: DEBUG nova.scheduler.client.report [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1541.714405] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.673s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1543.252948] env[70020]: INFO nova.compute.manager [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Swapping old allocation on dict_keys(['ee72c483-d9d9-4e62-8f73-e9f24668500d']) held by migration c5e39bb8-b426-4c48-89db-2d674f105f1e for instance [ 1543.275052] env[70020]: DEBUG nova.scheduler.client.report [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Overwriting current allocation {'allocations': {'ee72c483-d9d9-4e62-8f73-e9f24668500d': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 203}}, 'project_id': 'b34ec8c1ad864be694a6f9ce2b8a7788', 'user_id': '274a4150c13f4ec0b34194f12b995f25', 'consumer_generation': 1} on consumer 99ca95d2-0fd4-49ee-9f91-389420c7a4ac {{(pid=70020) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1543.355185] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.355374] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.355551] env[70020]: DEBUG nova.network.neutron [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.053907] env[70020]: DEBUG nova.network.neutron [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [{"id": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", 
"address": "fa:16:3e:bb:c1:e4", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c631bbd-dc", "ovs_interfaceid": "2c631bbd-dc11-42e5-9974-ae8b9992a1a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.556959] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-99ca95d2-0fd4-49ee-9f91-389420c7a4ac" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.557998] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e279a40-5adc-4895-be6a-a3d1c2b3610f {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.565221] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d5f701-113e-4f56-b2ba-cb7e551f493a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.644314] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1545.644627] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09f0ab3b-ff02-4f75-8d32-3b47b45e11b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.653128] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1545.653128] env[70020]: value = "task-3619423" [ 1545.653128] env[70020]: _type = "Task" [ 1545.653128] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.660831] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.163028] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619423, 'name': PowerOffVM_Task, 'duration_secs': 0.175792} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.163308] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1546.163954] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1546.164185] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.164339] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1546.164528] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.164669] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1546.164812] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1546.165015] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1546.165182] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1546.165347] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1546.165508] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1546.165672] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1546.170663] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51fcc313-c625-43a7-aa3a-99a2a6e235a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.186217] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1546.186217] env[70020]: value = "task-3619424" [ 1546.186217] env[70020]: _type = "Task" [ 1546.186217] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.193344] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619424, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.695997] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619424, 'name': ReconfigVM_Task, 'duration_secs': 0.164842} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.696807] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da03b6e9-f92b-43d4-b551-a65d74e9aa63 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.718815] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1546.719043] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.719201] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1546.719381] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.719524] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1546.719684] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1546.719920] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1546.720096] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1546.720261] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1546.720448] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1546.720631] env[70020]: DEBUG nova.virt.hardware [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1546.721415] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7138af19-99f7-4172-a270-3a11957022f1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.727360] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1546.727360] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52a0baa7-64bf-f815-2f0b-9236f88c9b37" [ 1546.727360] env[70020]: _type = "Task" [ 1546.727360] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.734734] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a0baa7-64bf-f815-2f0b-9236f88c9b37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.237295] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52a0baa7-64bf-f815-2f0b-9236f88c9b37, 'name': SearchDatastore_Task, 'duration_secs': 0.009968} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.242548] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1547.242810] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa5744f9-0190-4fc3-95d0-eff5cb5fe75a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.260217] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1547.260217] env[70020]: value = "task-3619425" [ 1547.260217] env[70020]: _type = "Task" [ 1547.260217] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.267434] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619425, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.769648] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619425, 'name': ReconfigVM_Task, 'duration_secs': 0.229} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.769939] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1547.770749] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04d168c-988e-4c7b-8e92-a9f5a26fd897 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.794583] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1547.794865] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-961c3bbc-d315-4adb-87ba-cc5fc42addaa {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.813113] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1547.813113] env[70020]: value = "task-3619426" [ 1547.813113] env[70020]: _type = "Task" [ 1547.813113] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.820975] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619426, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.323049] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619426, 'name': ReconfigVM_Task, 'duration_secs': 0.291511} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.323325] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac/99ca95d2-0fd4-49ee-9f91-389420c7a4ac.vmdk or device None with type thin {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1548.324170] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e632aa4-88f2-426f-9837-3d4af05b0e0a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.343829] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13f1cbf-03e6-401f-8489-ea06267b11c9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.364829] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5152cf-9485-4df6-a263-97669ddaa18d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.384222] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578904bd-89c5-4ec8-afb4-d0725f0158a8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.390424] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1548.390641] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2aebf4f-5b98-4f06-b6a2-be65a8ce14f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.396386] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1548.396386] env[70020]: value = "task-3619427" [ 1548.396386] env[70020]: _type = "Task" [ 1548.396386] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.403528] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.906433] env[70020]: DEBUG oslo_vmware.api [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619427, 'name': PowerOnVM_Task, 'duration_secs': 0.378532} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.906725] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1549.946603] env[70020]: INFO nova.compute.manager [None req-c449637e-1779-49b4-80b5-59cb7ee814fd tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance to original state: 'active' [ 1551.876872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.876872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.876872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.876872] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.877347] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1551.879309] env[70020]: INFO nova.compute.manager [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Terminating instance [ 1552.383982] env[70020]: DEBUG nova.compute.manager [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 
99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1552.384264] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1552.384562] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5355ac8b-29c8-4a05-bed5-73df0056fc79 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.392268] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1552.392268] env[70020]: value = "task-3619428" [ 1552.392268] env[70020]: _type = "Task" [ 1552.392268] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.399956] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619428, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.906060] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619428, 'name': PowerOffVM_Task, 'duration_secs': 0.196818} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.906419] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1552.906628] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Volume detach. 
Driver type: vmdk {{(pid=70020) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1552.906742] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721863', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'name': 'volume-086505f8-f56d-40ea-935a-06f022d2348c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '99ca95d2-0fd4-49ee-9f91-389420c7a4ac', 'attached_at': '2025-04-25T23:16:46.000000', 'detached_at': '', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'serial': '086505f8-f56d-40ea-935a-06f022d2348c'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1552.907634] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93968c68-710f-49d9-9a90-a939301c7d25 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.940187] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f937dc-4cb3-4bdb-ac9e-39af840d4e1c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.947611] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b00b3a0-42f1-4471-af67-f23a34472693 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.968232] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949f2494-6acd-4a0c-8c27-4df70b3609a3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.982451] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] The volume has not been displaced from its original location: [datastore2] volume-086505f8-f56d-40ea-935a-06f022d2348c/volume-086505f8-f56d-40ea-935a-06f022d2348c.vmdk. No consolidation needed. 
{{(pid=70020) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1552.987555] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfiguring VM instance instance-0000007f to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1552.987821] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6d05a6a-022a-4292-b829-f9847b398f43 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.005260] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1553.005260] env[70020]: value = "task-3619429" [ 1553.005260] env[70020]: _type = "Task" [ 1553.005260] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.012852] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619429, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.514309] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619429, 'name': ReconfigVM_Task, 'duration_secs': 0.191722} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.514580] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Reconfigured VM instance instance-0000007f to detach disk 2001 {{(pid=70020) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1553.519049] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0632fa2-f031-4303-8998-fff4e9db0599 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.533155] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1553.533155] env[70020]: value = "task-3619430" [ 1553.533155] env[70020]: _type = "Task" [ 1553.533155] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.541009] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619430, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.044541] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619430, 'name': ReconfigVM_Task, 'duration_secs': 0.202872} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.044830] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-721863', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'name': 'volume-086505f8-f56d-40ea-935a-06f022d2348c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '99ca95d2-0fd4-49ee-9f91-389420c7a4ac', 'attached_at': '2025-04-25T23:16:46.000000', 'detached_at': '', 'volume_id': '086505f8-f56d-40ea-935a-06f022d2348c', 'serial': '086505f8-f56d-40ea-935a-06f022d2348c'} {{(pid=70020) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1554.045046] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1554.045763] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463e55fd-1376-44e5-b6cf-dc3a3ea5ca83 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.052462] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1554.052663] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f16cc63a-f4f0-43a2-a897-7422dce2dbd1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.170786] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1554.170945] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1554.171133] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Deleting the datastore file [datastore1] 99ca95d2-0fd4-49ee-9f91-389420c7a4ac {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1554.171417] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef311aae-8ccd-4384-8864-4ea821679bec {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.178644] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1554.178644] env[70020]: value = "task-3619432" [ 1554.178644] env[70020]: _type = "Task" [ 1554.178644] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.186421] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.688998] env[70020]: DEBUG oslo_vmware.api [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146128} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.689273] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1554.689402] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1554.689597] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1554.689775] env[70020]: INFO nova.compute.manager [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Took 2.31 seconds to destroy the instance on the hypervisor. [ 1554.690008] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1554.690198] env[70020]: DEBUG nova.compute.manager [-] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1554.690291] env[70020]: DEBUG nova.network.neutron [-] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1555.193909] env[70020]: DEBUG nova.compute.manager [req-c0eace09-3822-438e-b0f7-915f88c2ca55 req-8300338e-ad75-4c1e-88c5-503ce52fc3cf service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Received event network-vif-deleted-2c631bbd-dc11-42e5-9974-ae8b9992a1a5 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1555.194200] env[70020]: INFO nova.compute.manager [req-c0eace09-3822-438e-b0f7-915f88c2ca55 req-8300338e-ad75-4c1e-88c5-503ce52fc3cf service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Neutron deleted interface 2c631bbd-dc11-42e5-9974-ae8b9992a1a5; detaching it from the instance and deleting it from the info cache [ 1555.194347] env[70020]: DEBUG nova.network.neutron [req-c0eace09-3822-438e-b0f7-915f88c2ca55 req-8300338e-ad75-4c1e-88c5-503ce52fc3cf service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.626046] env[70020]: DEBUG nova.network.neutron [-] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.697171] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fbe5231-c1f6-40cb-a1f2-284a151dae0d {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.707715] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c90aeee-d333-44b7-9079-2326eccc72b7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.731055] env[70020]: DEBUG nova.compute.manager [req-c0eace09-3822-438e-b0f7-915f88c2ca55 req-8300338e-ad75-4c1e-88c5-503ce52fc3cf service nova] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Detach interface failed, port_id=2c631bbd-dc11-42e5-9974-ae8b9992a1a5, reason: Instance 99ca95d2-0fd4-49ee-9f91-389420c7a4ac could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1556.127743] env[70020]: INFO nova.compute.manager [-] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Took 1.44 seconds to deallocate network for instance. [ 1556.671629] env[70020]: INFO nova.compute.manager [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 99ca95d2-0fd4-49ee-9f91-389420c7a4ac] Took 0.54 seconds to detach 1 volumes for instance. 
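The destroy sequence traced above (power off the VM, detach the VMDK, unregister, delete the datastore files) is driven by one recurring pattern: each vSphere call returns a Task managed object, and oslo.vmware's wait_for_task/_poll_task loop logs "progress is N%" roughly every half second until the task reports success, at which point the 'duration_secs' is recorded. Below is a minimal, self-contained Python sketch of that poll-until-done pattern; FakeTask, the poll_interval value, and the helper names are illustrative assumptions for this log, not the real oslo_vmware.api implementation.

    import time

    class FakeTask:
        """Stand-in for a vSphere Task managed object (illustrative only)."""
        def __init__(self, name, steps=3):
            self.name = name
            self._steps = steps
            self._polled = 0
            self.started = time.monotonic()

        def poll(self):
            # Each poll advances the fake task; a real task would be queried
            # through the vSphere API for its info.state and info.progress.
            self._polled += 1
            progress = min(100, int(100 * self._polled / self._steps))
            state = 'success' if progress == 100 else 'running'
            return state, progress

    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the log's
        'progress is N%' ... 'completed successfully' sequence."""
        while True:
            state, progress = task.poll()
            print(f"Task {task.name} progress is {progress}%.")
            if state == 'success':
                duration = time.monotonic() - task.started
                print(f"Task {task.name} completed successfully "
                      f"(duration_secs={duration:.3f}).")
                return
            if state == 'error':
                raise RuntimeError(f"Task {task.name} failed")
            time.sleep(poll_interval)

    if __name__ == '__main__':
        wait_for_task(FakeTask('ReconfigVM_Task'))

The roughly 0.5 s spacing between consecutive poll entries for the same task id in the trace is consistent with a fixed poll interval, which the poll_interval argument above mimics.

The entries that follow show the other synchronization primitive used throughout this trace: a named lock per instance UUID (and for "compute_resources"), with the wait and hold times reported on acquire and release ("waited 0.001s", "held 5.832s"). The sketch below reproduces that accounting with a plain threading.Lock; it only imitates the log format and is not the actual oslo_concurrency.lockutils code.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, owner):
        """Acquire a named lock and report waited/held times, in the style of
        the 'Acquiring lock ... / acquired ... waited Ns / released ... held Ns'
        sequence that oslo_concurrency.lockutils emits."""
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{owner}"')
        t0 = time.monotonic()
        lock.acquire()
        t1 = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    if __name__ == '__main__':
        with timed_lock("99ca95d2-0fd4-49ee-9f91-389420c7a4ac",
                        "do_terminate_instance"):
            time.sleep(0.2)  # stand-in for the actual terminate work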
[ 1557.177895] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.178214] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.178445] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.201525] env[70020]: INFO nova.scheduler.client.report [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted allocations for instance 99ca95d2-0fd4-49ee-9f91-389420c7a4ac [ 1557.708614] env[70020]: DEBUG oslo_concurrency.lockutils [None req-3eea78f0-f11f-4523-bf57-08e94ed6da39 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "99ca95d2-0fd4-49ee-9f91-389420c7a4ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.832s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1558.996906] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1558.997205] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.499534] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Starting instance... 
{{(pid=70020) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1560.018228] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1560.018499] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1560.020304] env[70020]: INFO nova.compute.claims [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1561.054536] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef8b7b3-ffa0-4f46-8821-61505a316eef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.061991] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefc641d-609b-49e8-8352-c3f54c378369 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.090858] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1a5edc-abde-470d-9a42-6b33115a4185 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.097796] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0abecfa-add2-45b2-bdfc-2664ac0bd0af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.111361] env[70020]: DEBUG nova.compute.provider_tree [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.614952] env[70020]: DEBUG nova.scheduler.client.report [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.120261] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.102s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1562.120840] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Start building networks asynchronously for instance. {{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1562.625760] env[70020]: DEBUG nova.compute.utils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Using /dev/sd instead of None {{(pid=70020) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1562.627170] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Allocating IP information in the background. {{(pid=70020) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1562.627343] env[70020]: DEBUG nova.network.neutron [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] allocate_for_instance() {{(pid=70020) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1562.679280] env[70020]: DEBUG nova.policy [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274a4150c13f4ec0b34194f12b995f25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b34ec8c1ad864be694a6f9ce2b8a7788', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=70020) authorize /opt/stack/nova/nova/policy.py:192}} [ 1562.949969] env[70020]: DEBUG nova.network.neutron [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Successfully created port: e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.130639] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Start building block device mappings for instance. 
{{(pid=70020) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1564.140247] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Start spawning the instance on the hypervisor. {{(pid=70020) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1564.169941] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-25T22:58:53Z,direct_url=,disk_format='vmdk',id=c9cd83bf-fd12-4173-a067-f57d38f23556,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8f53857c59164417b433ba5cd10274ac',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-25T22:58:54Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1564.170203] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1564.170357] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1564.170564] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1564.170726] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1564.170878] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1564.171096] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1564.171261] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1564.171424] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1564.171609] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1564.171789] env[70020]: DEBUG nova.virt.hardware [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1564.172676] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e41097-2a57-4fba-bd1f-96f43761f0ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.180890] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36af67d-1e1c-4bf0-a149-cbd023acd5a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.300544] env[70020]: DEBUG nova.compute.manager [req-18b56df7-0e0c-4725-8f48-ad94baf5fd04 req-a36b9ec3-38ac-46ce-9c83-ff9027b0a113 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-vif-plugged-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1564.300829] env[70020]: DEBUG oslo_concurrency.lockutils [req-18b56df7-0e0c-4725-8f48-ad94baf5fd04 req-a36b9ec3-38ac-46ce-9c83-ff9027b0a113 service nova] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1564.300971] env[70020]: DEBUG oslo_concurrency.lockutils [req-18b56df7-0e0c-4725-8f48-ad94baf5fd04 req-a36b9ec3-38ac-46ce-9c83-ff9027b0a113 service nova] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1564.301161] env[70020]: DEBUG oslo_concurrency.lockutils [req-18b56df7-0e0c-4725-8f48-ad94baf5fd04 req-a36b9ec3-38ac-46ce-9c83-ff9027b0a113 service nova] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1564.301326] env[70020]: DEBUG 
nova.compute.manager [req-18b56df7-0e0c-4725-8f48-ad94baf5fd04 req-a36b9ec3-38ac-46ce-9c83-ff9027b0a113 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] No waiting events found dispatching network-vif-plugged-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1564.301486] env[70020]: WARNING nova.compute.manager [req-18b56df7-0e0c-4725-8f48-ad94baf5fd04 req-a36b9ec3-38ac-46ce-9c83-ff9027b0a113 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received unexpected event network-vif-plugged-e6329052-d285-488b-88cb-7c16a57a7be3 for instance with vm_state building and task_state spawning. [ 1564.397291] env[70020]: DEBUG nova.network.neutron [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Successfully updated port: e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1564.900195] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.900374] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1564.900501] env[70020]: DEBUG nova.network.neutron [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1565.134658] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.434394] env[70020]: DEBUG nova.network.neutron [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Instance cache missing network info. 
{{(pid=70020) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1565.558153] env[70020]: DEBUG nova.network.neutron [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", "ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.637641] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1565.637897] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1565.638138] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1565.638374] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1565.639430] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec74903-0930-44be-9f56-0b0105068ad5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.647720] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d693b636-54d4-4c08-943d-7945a6195b70 {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.662228] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca7e3f7-3cd0-4ece-91f6-5f81f7a85b44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.668919] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d99ebd3-529d-4c4d-9cc6-79f5360642a9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.697941] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180724MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1565.698126] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1565.698298] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1566.060635] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1566.060996] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Instance network_info: |[{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", "ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=70020) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1566.061438] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:d8:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6329052-d285-488b-88cb-7c16a57a7be3', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1566.068770] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1566.068978] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1566.069213] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a2aa226-ca0f-411b-a5de-bc99a6bd4178 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.088402] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1566.088402] env[70020]: value = "task-3619433" [ 1566.088402] env[70020]: _type = "Task" [ 1566.088402] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.095808] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619433, 'name': CreateVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.328694] env[70020]: DEBUG nova.compute.manager [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1566.328923] env[70020]: DEBUG nova.compute.manager [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing instance network info cache due to event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3. 
{{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1566.329228] env[70020]: DEBUG oslo_concurrency.lockutils [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.329415] env[70020]: DEBUG oslo_concurrency.lockutils [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1566.329594] env[70020]: DEBUG nova.network.neutron [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1566.600469] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619433, 'name': CreateVM_Task, 'duration_secs': 0.302897} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.600837] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1566.601263] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.601429] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1566.601743] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1566.601988] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d4a159-0d9c-4cf0-8a0c-fce8dfaa2c51 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.606145] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1566.606145] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]525d5bbd-7532-c746-4748-22d892f1550d" [ 1566.606145] env[70020]: _type = "Task" [ 1566.606145] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.613228] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525d5bbd-7532-c746-4748-22d892f1550d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.723220] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1566.723425] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1566.723574] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1566.748900] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7bf070-b67e-49d1-8acb-184f0581ae55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.756058] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d95c5a-f60f-4bf1-857a-090533e9068a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.786113] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbcb9f7-b036-405e-848c-463f9e83bf9a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.793109] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca336f7d-48ac-4a0a-a02f-d7ec79d6031b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.805936] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1567.034148] env[70020]: DEBUG nova.network.neutron [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updated VIF entry in instance network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1567.034534] env[70020]: DEBUG nova.network.neutron [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", "ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.116972] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]525d5bbd-7532-c746-4748-22d892f1550d, 'name': SearchDatastore_Task, 'duration_secs': 0.010614} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.117302] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1567.117499] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Processing image c9cd83bf-fd12-4173-a067-f57d38f23556 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1567.117731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.117876] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1567.118063] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.118338] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ad50f12-8b0f-424b-928f-6195bf1bb8bb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.126590] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.126761] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1567.127530] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cad9dce-3110-4257-b4b3-7d188a3869c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.132538] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1567.132538] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]526a4e59-0fba-f9ac-9f3b-6321ff7764ba" [ 1567.132538] env[70020]: _type = "Task" [ 1567.132538] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.139592] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526a4e59-0fba-f9ac-9f3b-6321ff7764ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.308940] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1567.537664] env[70020]: DEBUG oslo_concurrency.lockutils [req-462276c3-6e9f-497e-b4c6-082ff4951367 req-85d504cc-c41b-433e-a44d-ef54dad8b023 service nova] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1567.643623] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]526a4e59-0fba-f9ac-9f3b-6321ff7764ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008026} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.644374] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-683a044c-ef86-4d43-b60c-1419363a6975 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.649975] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1567.649975] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52eaa447-d126-f56c-24ce-2b43a5ff0825" [ 1567.649975] env[70020]: _type = "Task" [ 1567.649975] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.656938] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52eaa447-d126-f56c-24ce-2b43a5ff0825, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.814066] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1567.814284] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1568.160801] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52eaa447-d126-f56c-24ce-2b43a5ff0825, 'name': SearchDatastore_Task, 'duration_secs': 0.009067} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.161067] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1568.161325] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1568.161580] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39307dd1-d8a8-4fa5-9133-71b9bbbb13ef {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.167985] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1568.167985] env[70020]: value = "task-3619434" [ 1568.167985] env[70020]: _type = "Task" [ 1568.167985] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.176276] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619434, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.677736] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619434, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.413119} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.678125] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9cd83bf-fd12-4173-a067-f57d38f23556/c9cd83bf-fd12-4173-a067-f57d38f23556.vmdk to [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1568.678269] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Extending root virtual disk to 1048576 {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1568.678525] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ac6ee93-acb3-4f2b-994a-35cdfae70565 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.684645] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1568.684645] env[70020]: value = "task-3619435" [ 1568.684645] env[70020]: _type = "Task" [ 1568.684645] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.691439] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619435, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.195238] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058044} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.195523] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Extended root virtual disk {{(pid=70020) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1569.196315] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d9ca8a-685d-4d6f-ac90-f7c7a60ac63c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.217687] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1569.218183] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e17fa59-0294-49b8-9d5d-8bb91cf48579 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.237137] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1569.237137] env[70020]: value = "task-3619436" [ 1569.237137] env[70020]: _type = "Task" [ 1569.237137] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.244705] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619436, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.747557] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619436, 'name': ReconfigVM_Task, 'duration_secs': 0.286084} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.747900] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Reconfigured VM instance instance-00000080 to attach disk [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk or device None with type sparse {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1569.748538] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-162efdbd-d33e-43b1-98ff-cb0efbe8af38 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.754166] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1569.754166] env[70020]: value = "task-3619437" [ 1569.754166] env[70020]: _type = "Task" [ 1569.754166] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.761284] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619437, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.264425] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619437, 'name': Rename_Task, 'duration_secs': 0.128954} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.264784] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1570.265048] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f24ca4a-e97d-4479-b7b0-a85c44b1fe54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.272839] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1570.272839] env[70020]: value = "task-3619438" [ 1570.272839] env[70020]: _type = "Task" [ 1570.272839] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.281249] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619438, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.782690] env[70020]: DEBUG oslo_vmware.api [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619438, 'name': PowerOnVM_Task, 'duration_secs': 0.412925} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.783060] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1570.783288] env[70020]: INFO nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Took 6.64 seconds to spawn the instance on the hypervisor. [ 1570.783500] env[70020]: DEBUG nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1570.784263] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a80ecd-0198-4b7c-92df-fe326cc47b53 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.815033] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.815033] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.815033] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.815299] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.815337] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.815442] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1571.135057] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1571.303027] env[70020]: INFO nova.compute.manager [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Took 11.30 seconds to build instance. [ 1571.804275] env[70020]: DEBUG oslo_concurrency.lockutils [None req-84f20e4a-d6c6-4180-9ba6-1d3ad059fea3 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.807s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1571.935450] env[70020]: DEBUG nova.compute.manager [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1571.935536] env[70020]: DEBUG nova.compute.manager [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing instance network info cache due to event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1571.935745] env[70020]: DEBUG oslo_concurrency.lockutils [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.935883] env[70020]: DEBUG oslo_concurrency.lockutils [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1571.936057] env[70020]: DEBUG nova.network.neutron [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1572.638605] env[70020]: DEBUG nova.network.neutron [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updated VIF entry in instance network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3. 
{{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1572.639068] env[70020]: DEBUG nova.network.neutron [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", "ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.141815] env[70020]: DEBUG oslo_concurrency.lockutils [req-2743101e-d0c8-4a76-bef5-e81bc59e2902 req-b301b113-0517-4090-98e3-adb59016186d service nova] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1575.135844] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1609.058062] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1609.058062] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1609.058062] env[70020]: INFO nova.compute.manager [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Shelving [ 1610.066479] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1610.066857] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7ed19c8-0a77-4494-bf90-2c9fbd25b645 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.074361] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1610.074361] env[70020]: value = "task-3619439" [ 1610.074361] env[70020]: _type = "Task" [ 1610.074361] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.082709] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619439, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.584034] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619439, 'name': PowerOffVM_Task, 'duration_secs': 0.165471} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.584340] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1610.585119] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf99672-c4b7-410e-9d46-56ce6b63b302 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.602829] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8a5704-82df-4346-9ad0-946ae2ed85f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.112600] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Creating Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1611.112955] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-73462b74-63c3-4523-b318-20ee50812dcb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.121250] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting 
for the task: (returnval){ [ 1611.121250] env[70020]: value = "task-3619440" [ 1611.121250] env[70020]: _type = "Task" [ 1611.121250] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.129664] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619440, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.632289] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619440, 'name': CreateSnapshot_Task, 'duration_secs': 0.403342} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.632530] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Created Snapshot of the VM instance {{(pid=70020) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1611.633242] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33006f3c-b1d9-472b-bebc-3a98664383d9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.150014] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Creating linked-clone VM from snapshot {{(pid=70020) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1612.150386] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-33591f0a-9560-491a-8a4d-80bcbd83438e {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.158936] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1612.158936] env[70020]: value = "task-3619441" [ 1612.158936] env[70020]: _type = "Task" [ 1612.158936] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.166574] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619441, 'name': CloneVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.669402] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619441, 'name': CloneVM_Task} progress is 94%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.169187] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619441, 'name': CloneVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.671008] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619441, 'name': CloneVM_Task, 'duration_secs': 1.017776} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.671330] env[70020]: INFO nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Created linked-clone VM from snapshot [ 1613.672046] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62d4922-64c0-4426-b7bc-101e8baddcd5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.678659] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Uploading image 01bbe943-2076-477a-875d-b4b83109b047 {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1613.698255] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1613.698255] env[70020]: value = "vm-721866" [ 1613.698255] env[70020]: _type = "VirtualMachine" [ 1613.698255] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1613.698497] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cc876490-9b57-421d-b401-49e87e704974 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.704893] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease: (returnval){ [ 1613.704893] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c535b0-7c92-061b-4a3e-1d202a7dbd21" [ 1613.704893] env[70020]: _type = "HttpNfcLease" [ 1613.704893] env[70020]: } obtained for exporting VM: (result){ [ 1613.704893] env[70020]: value = "vm-721866" [ 1613.704893] env[70020]: _type = "VirtualMachine" [ 1613.704893] env[70020]: }. 
{{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1613.705233] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the lease: (returnval){ [ 1613.705233] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c535b0-7c92-061b-4a3e-1d202a7dbd21" [ 1613.705233] env[70020]: _type = "HttpNfcLease" [ 1613.705233] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1613.712174] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1613.712174] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c535b0-7c92-061b-4a3e-1d202a7dbd21" [ 1613.712174] env[70020]: _type = "HttpNfcLease" [ 1613.712174] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1614.213304] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1614.213304] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c535b0-7c92-061b-4a3e-1d202a7dbd21" [ 1614.213304] env[70020]: _type = "HttpNfcLease" [ 1614.213304] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1614.213662] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1614.213662] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c535b0-7c92-061b-4a3e-1d202a7dbd21" [ 1614.213662] env[70020]: _type = "HttpNfcLease" [ 1614.213662] env[70020]: }. {{(pid=70020) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1614.214297] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1aefcd-95e6-48df-8ef0-ce2322858563 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.221491] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c1b43e-f921-94f3-d1eb-83b87eb4efb3/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1614.221659] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c1b43e-f921-94f3-d1eb-83b87eb4efb3/disk-0.vmdk for reading. 
{{(pid=70020) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1614.305118] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c68cd6f1-3917-48c3-8994-b89b1c95ffd8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.416056] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c1b43e-f921-94f3-d1eb-83b87eb4efb3/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1621.416954] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ebc4d8-0f07-483e-a46b-06f7d86e48ac {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.423137] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c1b43e-f921-94f3-d1eb-83b87eb4efb3/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1621.423301] env[70020]: ERROR oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c1b43e-f921-94f3-d1eb-83b87eb4efb3/disk-0.vmdk due to incomplete transfer. [ 1621.423508] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-515e41cb-b8c4-4db4-935d-55b8fc8c4ca6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.430195] env[70020]: DEBUG oslo_vmware.rw_handles [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c1b43e-f921-94f3-d1eb-83b87eb4efb3/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1621.430382] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Uploaded image 01bbe943-2076-477a-875d-b4b83109b047 to the Glance image server {{(pid=70020) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1621.432644] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Destroying the VM {{(pid=70020) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1621.432866] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-63e98b5b-8745-44c5-b3a6-bc56df04dac9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.437760] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1621.437760] env[70020]: value = "task-3619443" [ 1621.437760] env[70020]: _type = "Task" [ 1621.437760] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.444913] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619443, 'name': Destroy_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.947658] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619443, 'name': Destroy_Task, 'duration_secs': 0.331003} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.947917] env[70020]: INFO nova.virt.vmwareapi.vm_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Destroyed the VM [ 1621.948180] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deleting Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1621.948425] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d0008e2c-e7ce-4d82-a5d6-1fc59eaf37f2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.954460] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1621.954460] env[70020]: value = "task-3619444" [ 1621.954460] env[70020]: _type = "Task" [ 1621.954460] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.961854] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619444, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.464294] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619444, 'name': RemoveSnapshot_Task, 'duration_secs': 0.346551} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.464667] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deleted Snapshot of the VM instance {{(pid=70020) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1622.464807] env[70020]: DEBUG nova.compute.manager [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1622.465558] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e596c7-22d7-4e69-9f64-1d569474e45a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.977431] env[70020]: INFO nova.compute.manager [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Shelve offloading [ 1623.482685] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1623.483017] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abb8b5e9-564f-42ca-a117-e8826048b0ba {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.490136] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1623.490136] env[70020]: value = "task-3619445" [ 1623.490136] env[70020]: _type = "Task" [ 1623.490136] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.497656] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619445, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.000685] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] VM already powered off {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1624.000976] env[70020]: DEBUG nova.compute.manager [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1624.001708] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f98333-0566-4f49-baaa-db56199499ae {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.007320] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.007488] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1624.007656] env[70020]: DEBUG nova.network.neutron [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1624.721858] env[70020]: DEBUG nova.network.neutron [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", 
"ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.225026] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1625.425157] env[70020]: DEBUG nova.compute.manager [req-02ccf86c-9b3d-4ed7-886e-9fcc8697c7ba req-280289cb-902e-4edb-ab7a-a7f8b85ac051 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-vif-unplugged-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1625.425505] env[70020]: DEBUG oslo_concurrency.lockutils [req-02ccf86c-9b3d-4ed7-886e-9fcc8697c7ba req-280289cb-902e-4edb-ab7a-a7f8b85ac051 service nova] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1625.425600] env[70020]: DEBUG oslo_concurrency.lockutils [req-02ccf86c-9b3d-4ed7-886e-9fcc8697c7ba req-280289cb-902e-4edb-ab7a-a7f8b85ac051 service nova] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1625.425722] env[70020]: DEBUG oslo_concurrency.lockutils [req-02ccf86c-9b3d-4ed7-886e-9fcc8697c7ba req-280289cb-902e-4edb-ab7a-a7f8b85ac051 service nova] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1625.425888] env[70020]: DEBUG nova.compute.manager [req-02ccf86c-9b3d-4ed7-886e-9fcc8697c7ba req-280289cb-902e-4edb-ab7a-a7f8b85ac051 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] No waiting events found dispatching network-vif-unplugged-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1625.426109] env[70020]: WARNING nova.compute.manager [req-02ccf86c-9b3d-4ed7-886e-9fcc8697c7ba req-280289cb-902e-4edb-ab7a-a7f8b85ac051 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received unexpected event network-vif-unplugged-e6329052-d285-488b-88cb-7c16a57a7be3 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1625.527571] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1625.528498] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190e83bb-9392-409b-839b-06f9a8ea8d39 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.536121] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1625.536357] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-237b046b-a6be-48d0-8530-2c0d92ca04af {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.609555] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1625.609730] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1625.609891] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleting the datastore file [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1625.610176] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc94e987-94bb-41a7-aed5-6d10be88203a {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.616735] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1625.616735] env[70020]: value = "task-3619447" [ 1625.616735] env[70020]: _type = "Task" [ 1625.616735] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.624421] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619447, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.126295] env[70020]: DEBUG oslo_vmware.api [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118248} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.126681] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1626.126801] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1626.126867] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1626.148683] env[70020]: INFO nova.scheduler.client.report [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted allocations for instance b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 [ 1626.653647] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1626.653975] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1626.654288] env[70020]: DEBUG nova.objects.instance [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'resources' on Instance uuid b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1627.134720] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager.update_available_resource {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.157319] env[70020]: DEBUG nova.objects.instance [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 
'numa_topology' on Instance uuid b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1627.453617] env[70020]: DEBUG nova.compute.manager [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1627.453800] env[70020]: DEBUG nova.compute.manager [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing instance network info cache due to event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1627.453928] env[70020]: DEBUG oslo_concurrency.lockutils [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.454082] env[70020]: DEBUG oslo_concurrency.lockutils [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1627.454245] env[70020]: DEBUG nova.network.neutron [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1627.628893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1627.637058] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1627.659801] env[70020]: DEBUG nova.objects.base [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=70020) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1627.688236] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf49241-40b7-4024-a6af-3dd237c53b54 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.695753] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255c9fdb-8fb1-4344-af24-760b596e250f {{(pid=70020) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.727227] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c72ea32-8336-4a75-8f06-2b47bd85e4d1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.734510] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e019d9-a51f-4f6f-ae17-3c0414a4b4f3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.747102] env[70020]: DEBUG nova.compute.provider_tree [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.140270] env[70020]: DEBUG nova.network.neutron [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updated VIF entry in instance network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1628.140681] env[70020]: DEBUG nova.network.neutron [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": null, "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape6329052-d2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.250376] env[70020]: DEBUG nova.scheduler.client.report [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1628.643910] env[70020]: DEBUG 
oslo_concurrency.lockutils [req-c889a634-e9eb-4095-bf53-2a4fb6f9c339 req-91120e8d-51b8-4ca7-be0c-8e87d4cf7e4c service nova] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1628.755073] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.101s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1628.757531] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.121s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1628.757718] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1628.757858] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=70020) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1628.758743] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ec3bf9-9f54-4310-a51f-8c9f9fd72336 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.767360] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1eb728-0537-46c5-a631-d7d562d7bf55 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.781115] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fe3e4d-5e92-4286-8a57-fb9bcf188b44 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.787441] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a48670-3b84-4798-adfc-45dea6690721 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.818376] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180945MB free_disk=76GB free_vcpus=48 pci_devices=None {{(pid=70020) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1628.818376] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1628.820138] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1629.268694] env[70020]: DEBUG oslo_concurrency.lockutils [None req-d2f959af-80ac-43fa-b23b-dd2c68d7384a tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.211s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1629.269596] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.641s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1629.269773] env[70020]: INFO nova.compute.manager [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Unshelving [ 1630.292953] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1630.339858] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Instance b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=70020) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1630.340067] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1630.340220] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=70020) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1630.364653] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222ce34b-95cb-4afe-a1a8-7d9124fb0118 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.372069] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b6596d-9d43-4dc5-8dff-b3876d547eff {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.401875] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791e3806-3971-48ad-a8e9-da79c68a12c3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.408543] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad173011-63fd-42f5-8687-fc26d076da91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.422529] env[70020]: DEBUG nova.compute.provider_tree [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1630.925685] env[70020]: DEBUG nova.scheduler.client.report [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1631.430496] env[70020]: DEBUG nova.compute.resource_tracker [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=70020) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1631.430880] env[70020]: DEBUG oslo_concurrency.lockutils [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.612s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.431048] env[70020]: DEBUG oslo_concurrency.lockutils [None 
req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.138s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1631.431245] env[70020]: DEBUG nova.objects.instance [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'pci_requests' on Instance uuid b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1631.935750] env[70020]: DEBUG nova.objects.instance [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'numa_topology' on Instance uuid b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1632.433191] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.433694] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.433817] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.433928] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.434097] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.434250] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.434394] env[70020]: DEBUG nova.compute.manager [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=70020) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1632.438029] env[70020]: INFO nova.compute.claims [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1633.472229] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25ff737-5bd1-4280-b610-13bb4e95e603 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.479928] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec92f38-96c3-4cf1-985c-b56f96cc9aad {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.509850] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933c1c45-5419-4c44-89c1-7ce2f4efd7c1 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.516584] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b1e93a-7e4c-42ac-a394-f70fcdea2032 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.529009] env[70020]: DEBUG nova.compute.provider_tree [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.032559] env[70020]: DEBUG nova.scheduler.client.report [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1634.538563] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1634.567401] env[70020]: INFO nova.network.neutron [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating port e6329052-d285-488b-88cb-7c16a57a7be3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1635.135774] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None 
None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.911937] env[70020]: DEBUG nova.compute.manager [req-387e15fe-5214-4177-830d-e1cab1839160 req-99216dc7-bf5e-4839-b6e2-63bcc1868bf0 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-vif-plugged-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1635.912196] env[70020]: DEBUG oslo_concurrency.lockutils [req-387e15fe-5214-4177-830d-e1cab1839160 req-99216dc7-bf5e-4839-b6e2-63bcc1868bf0 service nova] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1635.912368] env[70020]: DEBUG oslo_concurrency.lockutils [req-387e15fe-5214-4177-830d-e1cab1839160 req-99216dc7-bf5e-4839-b6e2-63bcc1868bf0 service nova] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1635.912556] env[70020]: DEBUG oslo_concurrency.lockutils [req-387e15fe-5214-4177-830d-e1cab1839160 req-99216dc7-bf5e-4839-b6e2-63bcc1868bf0 service nova] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1635.912767] env[70020]: DEBUG nova.compute.manager [req-387e15fe-5214-4177-830d-e1cab1839160 req-99216dc7-bf5e-4839-b6e2-63bcc1868bf0 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] No waiting events found dispatching network-vif-plugged-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1635.912917] env[70020]: WARNING nova.compute.manager [req-387e15fe-5214-4177-830d-e1cab1839160 req-99216dc7-bf5e-4839-b6e2-63bcc1868bf0 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received unexpected event network-vif-plugged-e6329052-d285-488b-88cb-7c16a57a7be3 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1636.220215] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.220418] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1636.221031] env[70020]: DEBUG nova.network.neutron [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Building network info cache for instance {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1636.917015] env[70020]: DEBUG nova.network.neutron [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", "ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.419555] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1637.447978] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-25T22:59:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e495494ed2e4b49b8fe0b51992cc8847',container_format='bare',created_at=2025-04-25T23:17:50Z,direct_url=,disk_format='vmdk',id=01bbe943-2076-477a-875d-b4b83109b047,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-599472188-shelved',owner='b34ec8c1ad864be694a6f9ce2b8a7788',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-04-25T23:18:03Z,virtual_size=,visibility=), allow threads: False {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1637.448243] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1637.448395] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image limits 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1637.448570] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Flavor pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1637.448718] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Image pref 0:0:0 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1637.448860] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=70020) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1637.449071] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1637.449229] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1637.449419] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Got 1 possible topologies {{(pid=70020) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1637.449593] env[70020]: DEBUG 
nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1637.449765] env[70020]: DEBUG nova.virt.hardware [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=70020) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1637.450619] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e166683-fa40-4069-95e2-e92a9fe8b061 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.458473] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ce22a7-b411-452d-a574-af188cad5eeb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.471292] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:d8:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6329052-d285-488b-88cb-7c16a57a7be3', 'vif_model': 'vmxnet3'}] {{(pid=70020) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1637.478462] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1637.478678] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Creating VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1637.478867] env[70020]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13f4acf0-7b6d-4648-a230-5fbf8c7231f5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.497085] env[70020]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1637.497085] env[70020]: value = "task-3619448" [ 1637.497085] env[70020]: _type = "Task" [ 1637.497085] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.505458] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619448, 'name': CreateVM_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.937884] env[70020]: DEBUG nova.compute.manager [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1637.938120] env[70020]: DEBUG nova.compute.manager [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing instance network info cache due to event network-changed-e6329052-d285-488b-88cb-7c16a57a7be3. {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1637.938296] env[70020]: DEBUG oslo_concurrency.lockutils [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] Acquiring lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.938441] env[70020]: DEBUG oslo_concurrency.lockutils [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] Acquired lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1637.938598] env[70020]: DEBUG nova.network.neutron [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Refreshing network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1638.007096] env[70020]: DEBUG oslo_vmware.api [-] Task: {'id': task-3619448, 'name': CreateVM_Task, 'duration_secs': 0.301699} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.007096] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Created VM on the ESX host {{(pid=70020) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1638.007735] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.007893] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1638.008280] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1638.008524] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a26aa2bb-96e7-4c26-b137-201ac034c347 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.013316] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1638.013316] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]52c2ec8a-104d-89e9-98a4-b35cf4e32602" [ 1638.013316] env[70020]: _type = "Task" [ 1638.013316] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.020326] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]52c2ec8a-104d-89e9-98a4-b35cf4e32602, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.523085] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1638.523341] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Processing image 01bbe943-2076-477a-875d-b4b83109b047 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1638.523579] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.523731] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquired lock "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1638.523939] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1638.524202] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13e6e4ce-b9a3-4e5c-92b3-e730910aaffe {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.532319] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1638.532502] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=70020) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1638.533209] env[70020]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad0cbf42-4151-4f5c-8ab8-2df35be07be4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.538211] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1638.538211] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]523b25ac-12ed-edf7-3041-9a87c52e391d" [ 1638.538211] env[70020]: _type = "Task" [ 1638.538211] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.545297] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': session[528c1535-3daa-a7b0-823d-982a96a72224]523b25ac-12ed-edf7-3041-9a87c52e391d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.655939] env[70020]: DEBUG nova.network.neutron [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updated VIF entry in instance network info cache for port e6329052-d285-488b-88cb-7c16a57a7be3. {{(pid=70020) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1638.656336] env[70020]: DEBUG nova.network.neutron [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [{"id": "e6329052-d285-488b-88cb-7c16a57a7be3", "address": "fa:16:3e:22:d8:96", "network": {"id": "e7ac5730-3f56-469f-b3b1-25a06edc1284", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-765496064-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34ec8c1ad864be694a6f9ce2b8a7788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6329052-d2", "ovs_interfaceid": "e6329052-d285-488b-88cb-7c16a57a7be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.048530] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: 
b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Preparing fetch location {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1639.048885] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Fetch image to [datastore1] OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74/OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74.vmdk {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1639.048965] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Downloading stream optimized image 01bbe943-2076-477a-875d-b4b83109b047 to [datastore1] OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74/OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74.vmdk on the data store datastore1 as vApp {{(pid=70020) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1639.049129] env[70020]: DEBUG nova.virt.vmwareapi.images [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Downloading image file data 01bbe943-2076-477a-875d-b4b83109b047 to the ESX as VM named 'OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74' {{(pid=70020) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1639.112625] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1639.112625] env[70020]: value = "resgroup-9" [ 1639.112625] env[70020]: _type = "ResourcePool" [ 1639.112625] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1639.112953] env[70020]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c38737eb-bd87-4a10-a476-7c7ad9a49523 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.133444] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease: (returnval){ [ 1639.133444] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522f970b-9d4b-a6c2-16c6-f4b73d42ff98" [ 1639.133444] env[70020]: _type = "HttpNfcLease" [ 1639.133444] env[70020]: } obtained for vApp import into resource pool (val){ [ 1639.133444] env[70020]: value = "resgroup-9" [ 1639.133444] env[70020]: _type = "ResourcePool" [ 1639.133444] env[70020]: }. 
{{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1639.133692] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the lease: (returnval){ [ 1639.133692] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522f970b-9d4b-a6c2-16c6-f4b73d42ff98" [ 1639.133692] env[70020]: _type = "HttpNfcLease" [ 1639.133692] env[70020]: } to be ready. {{(pid=70020) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1639.139447] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1639.139447] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522f970b-9d4b-a6c2-16c6-f4b73d42ff98" [ 1639.139447] env[70020]: _type = "HttpNfcLease" [ 1639.139447] env[70020]: } is initializing. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1639.159118] env[70020]: DEBUG oslo_concurrency.lockutils [req-8280e03b-fd8e-4feb-b03c-22c301dbccff req-2146c99a-547c-4f3a-a723-1f5ecbee07a2 service nova] Releasing lock "refresh_cache-b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1639.642119] env[70020]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1639.642119] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522f970b-9d4b-a6c2-16c6-f4b73d42ff98" [ 1639.642119] env[70020]: _type = "HttpNfcLease" [ 1639.642119] env[70020]: } is ready. {{(pid=70020) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1639.642441] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1639.642441] env[70020]: value = "session[528c1535-3daa-a7b0-823d-982a96a72224]522f970b-9d4b-a6c2-16c6-f4b73d42ff98" [ 1639.642441] env[70020]: _type = "HttpNfcLease" [ 1639.642441] env[70020]: }. {{(pid=70020) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1639.643149] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67c10ff-660e-4af6-a7e8-da86d9e069c7 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.650342] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520e9375-d227-5c0b-1ca9-a446575fabd3/disk-0.vmdk from lease info. {{(pid=70020) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1639.650516] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520e9375-d227-5c0b-1ca9-a446575fabd3/disk-0.vmdk. 
{{(pid=70020) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1639.714462] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-89bfe31f-044c-49ba-bf90-ecb1432a713b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.826959] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Completed reading data from the image iterator. {{(pid=70020) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1640.827400] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520e9375-d227-5c0b-1ca9-a446575fabd3/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1640.828220] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabb98c0-fc5e-49ba-85b4-bb3b60adb8d6 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.835067] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520e9375-d227-5c0b-1ca9-a446575fabd3/disk-0.vmdk is in state: ready. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1640.835248] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520e9375-d227-5c0b-1ca9-a446575fabd3/disk-0.vmdk. {{(pid=70020) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1640.835534] env[70020]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-20921af4-2fa3-4489-a563-3eabdeab411b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.035433] env[70020]: DEBUG oslo_vmware.rw_handles [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520e9375-d227-5c0b-1ca9-a446575fabd3/disk-0.vmdk. 
{{(pid=70020) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1641.035605] env[70020]: INFO nova.virt.vmwareapi.images [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Downloaded image file data 01bbe943-2076-477a-875d-b4b83109b047 [ 1641.036458] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bbf0f7-46ac-4a9d-bdd1-6c7c36a531fb {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.052743] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5e1c024-af70-401d-9a91-b6faee38c770 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.079333] env[70020]: INFO nova.virt.vmwareapi.images [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] The imported VM was unregistered [ 1641.081592] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Caching image {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1641.081822] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Creating directory with path [datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.082074] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-214fecb1-d697-4f1e-b279-a6c252a54f65 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.093358] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Created directory with path [datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047 {{(pid=70020) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.093530] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74/OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74.vmdk to [datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk. 
{{(pid=70020) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1641.093749] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-057962ce-146b-486b-bb00-88289050e370 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.099718] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1641.099718] env[70020]: value = "task-3619451" [ 1641.099718] env[70020]: _type = "Task" [ 1641.099718] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.107046] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619451, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.610421] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619451, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.110768] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619451, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.611272] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619451, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.113016] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619451, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.613753] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619451, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.164742} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.614068] env[70020]: INFO nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74/OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74.vmdk to [datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk. 
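The entries up to this point repeat one pattern: a vCenter operation (CreateVM_Task, SearchDatastore_Task, MoveVirtualDisk_Task) returns a Task handle and the driver polls its progress until it reports completion with a duration. The sketch below is a minimal, stdlib-only illustration of that poll-until-done loop, assuming a made-up TaskHandle stand-in; the real driver goes through oslo_vmware's wait_for_task against the vCenter PropertyCollector, so nothing here is the actual API.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskHandle:
    """Illustrative stand-in for a vCenter task handle (not oslo_vmware)."""
    name: str
    _progress: int = 0

    def poll(self) -> dict:
        # Pretend the backend advances ~25% per poll, roughly matching the
        # MoveVirtualDisk_Task trace above (0% -> 24% -> 46% -> 71% -> 94% -> done).
        self._progress = min(self._progress + 25, 100)
        state = "success" if self._progress >= 100 else "running"
        return {"name": self.name, "progress": self._progress, "state": state}


def wait_for_task(task: TaskHandle, interval: float = 0.5, timeout: float = 60.0) -> dict:
    """Poll a task until it completes, printing progress like the log entries."""
    start = time.monotonic()
    while True:
        info = task.poll()
        print(f"Task: {info['name']} progress is {info['progress']}%.")
        if info["state"] == "success":
            info["duration_secs"] = round(time.monotonic() - start, 6)
            print(f"Task: {info['name']} completed successfully in "
                  f"{info['duration_secs']}s.")
            return info
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"{info['name']} did not finish within {timeout}s")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(TaskHandle("MoveVirtualDisk_Task"), interval=0.1)
```

The same loop shape covers every task seen later in this trace (CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task); only the operation name and duration change.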
[ 1643.614267] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Cleaning up location [datastore1] OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74 {{(pid=70020) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1643.614429] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6c80849f-0e41-44b2-84cc-99352669aa74 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.614671] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c084f51-645c-495a-88b3-1c1562c3c411 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.620605] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1643.620605] env[70020]: value = "task-3619452" [ 1643.620605] env[70020]: _type = "Task" [ 1643.620605] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.627577] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.131575] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036053} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.131887] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.131978] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Releasing lock "[datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk" {{(pid=70020) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1644.132251] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk to [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1644.132526] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed41eba1-8d15-4bd4-a075-86a60967a7e3 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.138735] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1644.138735] env[70020]: value = "task-3619453" [ 1644.138735] env[70020]: _type = "Task" [ 1644.138735] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.145988] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.649649] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619453, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.130915] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.150199] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619453, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.649817] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619453, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.151153] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619453, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.651888] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619453, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.19486} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.652430] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/01bbe943-2076-477a-875d-b4b83109b047/01bbe943-2076-477a-875d-b4b83109b047.vmdk to [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk {{(pid=70020) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1646.654051] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f979c70e-243b-4be7-bb8f-485b0faae8c8 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.675797] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1646.676050] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee6be39b-1903-48cc-b18b-b3f254250316 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.695279] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1646.695279] env[70020]: value = "task-3619454" [ 1646.695279] env[70020]: _type = "Task" [ 1646.695279] env[70020]: } to complete. 
{{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.702758] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.205891] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619454, 'name': ReconfigVM_Task, 'duration_secs': 0.267916} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.206242] env[70020]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Reconfigured VM instance instance-00000080 to attach disk [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95/b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95.vmdk or device None with type streamOptimized {{(pid=70020) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.206855] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90430ead-1cbe-4892-a054-6ecfa6b60a2c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.212387] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1647.212387] env[70020]: value = "task-3619455" [ 1647.212387] env[70020]: _type = "Task" [ 1647.212387] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.219439] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619455, 'name': Rename_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.722469] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619455, 'name': Rename_Task, 'duration_secs': 0.170146} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.722719] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powering on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.722957] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc066e49-9586-457d-897d-f443d4c8e3ee {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.728697] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1647.728697] env[70020]: value = "task-3619456" [ 1647.728697] env[70020]: _type = "Task" [ 1647.728697] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.735631] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.240104] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619456, 'name': PowerOnVM_Task} progress is 100%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.740240] env[70020]: DEBUG oslo_vmware.api [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619456, 'name': PowerOnVM_Task, 'duration_secs': 0.535442} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.740495] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powered on the VM {{(pid=70020) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.835189] env[70020]: DEBUG nova.compute.manager [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Checking state {{(pid=70020) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1648.836094] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656869bf-6468-4150-b321-9c7407966853 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.352138] env[70020]: DEBUG oslo_concurrency.lockutils [None req-ac87fe98-d810-4a20-9cc0-ae7e4e77d9cb tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.082s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1650.351765] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1650.352059] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1650.352284] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1650.352522] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1650.352644] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 
tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1650.354690] env[70020]: INFO nova.compute.manager [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Terminating instance [ 1650.858344] env[70020]: DEBUG nova.compute.manager [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Start destroying the instance on the hypervisor. {{(pid=70020) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1650.858584] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Destroying instance {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1650.859502] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86b5ba4-22a2-4004-a082-8c6bbf2edc82 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.867719] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powering off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1650.867982] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7eb6d8b9-adbe-466e-8358-42d0d99dd9b5 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.874269] env[70020]: DEBUG oslo_vmware.api [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1650.874269] env[70020]: value = "task-3619457" [ 1650.874269] env[70020]: _type = "Task" [ 1650.874269] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.882721] env[70020]: DEBUG oslo_vmware.api [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.384406] env[70020]: DEBUG oslo_vmware.api [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619457, 'name': PowerOffVM_Task, 'duration_secs': 0.189224} completed successfully. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.384771] env[70020]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Powered off the VM {{(pid=70020) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1651.384814] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Unregistering the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1651.385060] env[70020]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92ceb21d-dfd9-40da-a596-f2512a57f3ab {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.446243] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Unregistered the VM {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1651.446457] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deleting contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1651.446623] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleting the datastore file [datastore1] b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1651.446912] env[70020]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb3e1a5f-4b5a-4b8c-9273-ea35d01407d4 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.454608] env[70020]: DEBUG oslo_vmware.api [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for the task: (returnval){ [ 1651.454608] env[70020]: value = "task-3619459" [ 1651.454608] env[70020]: _type = "Task" [ 1651.454608] env[70020]: } to complete. {{(pid=70020) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.461823] env[70020]: DEBUG oslo_vmware.api [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619459, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.963959] env[70020]: DEBUG oslo_vmware.api [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Task: {'id': task-3619459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132162} completed successfully. {{(pid=70020) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.964235] env[70020]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted the datastore file {{(pid=70020) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1651.964416] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deleted contents of the VM from datastore datastore1 {{(pid=70020) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1651.964588] env[70020]: DEBUG nova.virt.vmwareapi.vmops [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Instance destroyed {{(pid=70020) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1651.964804] env[70020]: INFO nova.compute.manager [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1651.965065] env[70020]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=70020) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1651.965262] env[70020]: DEBUG nova.compute.manager [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Deallocating network for instance {{(pid=70020) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1651.965379] env[70020]: DEBUG nova.network.neutron [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] deallocate_for_instance() {{(pid=70020) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1652.405318] env[70020]: DEBUG nova.compute.manager [req-84dfff4a-1a5a-4dc6-9823-a8ff65d55ac1 req-076366d9-bb52-40ed-aa38-85ec34f24ec2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Received event network-vif-deleted-e6329052-d285-488b-88cb-7c16a57a7be3 {{(pid=70020) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1652.405651] env[70020]: INFO nova.compute.manager [req-84dfff4a-1a5a-4dc6-9823-a8ff65d55ac1 req-076366d9-bb52-40ed-aa38-85ec34f24ec2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Neutron deleted interface e6329052-d285-488b-88cb-7c16a57a7be3; detaching it from the instance and deleting it from the info cache [ 1652.405686] env[70020]: DEBUG nova.network.neutron [req-84dfff4a-1a5a-4dc6-9823-a8ff65d55ac1 req-076366d9-bb52-40ed-aa38-85ec34f24ec2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.889793] env[70020]: DEBUG nova.network.neutron [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Updating instance_info_cache with network_info: [] {{(pid=70020) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.908064] env[70020]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-259ad74a-0fa5-4339-827c-fd33aff19b02 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.918579] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00edea6c-60f1-4f41-aa97-87450a1c463b {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.943134] env[70020]: DEBUG nova.compute.manager [req-84dfff4a-1a5a-4dc6-9823-a8ff65d55ac1 req-076366d9-bb52-40ed-aa38-85ec34f24ec2 service nova] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Detach interface failed, port_id=e6329052-d285-488b-88cb-7c16a57a7be3, reason: Instance b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 could not be found. {{(pid=70020) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1653.392796] env[70020]: INFO nova.compute.manager [-] [instance: b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95] Took 1.43 seconds to deallocate network for instance. 
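The terminate path above is serialized on per-instance lock names: do_terminate_instance holds the lock for the instance UUID, briefly takes the "<uuid>-events" lock to clear pending external events, then powers off and unregisters the VM, deletes its datastore directory, and deallocates the network. A stdlib-only sketch of that ordering follows; the lock names mirror the log, but the locks, functions, and print statements are hypothetical stand-ins, not the nova.compute.manager or oslo.concurrency code.

```python
import threading
from collections import defaultdict

# Hypothetical stand-in for oslo.concurrency's named locks.
_instance_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)


def clear_events_for_instance(uuid: str) -> None:
    # Guarded by "<uuid>-events", matching the lock names in the trace above.
    with _instance_locks[f"{uuid}-events"]:
        print(f'Lock "{uuid}-events" held; clearing pending external events')


def do_terminate_instance(uuid: str) -> None:
    # The whole teardown is serialized on the instance UUID so it cannot race
    # the unshelve path that released the same lock a moment earlier.
    with _instance_locks[uuid]:
        clear_events_for_instance(uuid)
        print("Powering off the VM")           # PowerOffVM_Task
        print("Unregistering the VM")          # UnregisterVM
        print("Deleting the datastore file")   # DeleteDatastoreFile_Task
        print("Deallocating network for instance")


if __name__ == "__main__":
    do_terminate_instance("b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95")
```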
[ 1653.899122] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1653.899508] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1653.899671] env[70020]: DEBUG nova.objects.instance [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lazy-loading 'resources' on Instance uuid b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 {{(pid=70020) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1654.435040] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8af790-1856-49a1-9017-d7433b3a3e91 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.442251] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d0702-beef-49c0-8426-ef8968358fc9 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.472715] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f904f233-3133-400b-8dec-7814fb847af2 {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.479519] env[70020]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30c6c23-937d-4186-abb3-365a9aaf0b9c {{(pid=70020) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.492646] env[70020]: DEBUG nova.compute.provider_tree [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed in ProviderTree for provider: ee72c483-d9d9-4e62-8f73-e9f24668500d {{(pid=70020) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.996117] env[70020]: DEBUG nova.scheduler.client.report [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Inventory has not changed for provider ee72c483-d9d9-4e62-8f73-e9f24668500d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 76, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=70020) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1655.501167] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 
tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.602s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1655.521528] env[70020]: INFO nova.scheduler.client.report [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Deleted allocations for instance b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95 [ 1656.029810] env[70020]: DEBUG oslo_concurrency.lockutils [None req-cb63a9c9-f16c-4f52-b022-8d4d21283757 tempest-ServerActionsTestOtherB-172314067 tempest-ServerActionsTestOtherB-172314067-project-member] Lock "b18c5a49-09d3-4bb8-8018-f0aaa0a1ab95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.678s {{(pid=70020) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1688.135105] env[70020]: DEBUG oslo_service.periodic_task [None req-c22200ee-8037-49f6-b33e-5ba8fe932f08 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=70020) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
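After the instance is destroyed, the resource tracker updates usage under the "compute_resources" lock and the scheduler report client logs "Inventory has not changed" because the inventory it would send to placement matches what was last reported, so no update call is made. The sketch below illustrates that short-circuit with a plain dictionary comparison; the inventory shape is copied from the log, but the cache and function are hypothetical, not the real nova.scheduler.client.report interface.

```python
# Last inventory reported per resource provider UUID (illustrative cache).
_last_reported: dict[str, dict] = {}


def set_inventory_for_provider(provider_uuid: str, inventory: dict) -> bool:
    """Return True if a placement update would be sent, False if skipped."""
    if _last_reported.get(provider_uuid) == inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    _last_reported[provider_uuid] = inventory
    print(f"Updating placement inventory for provider {provider_uuid}")
    return True


if __name__ == "__main__":
    inv = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 76,
                    "step_size": 1, "allocation_ratio": 1.0},
    }
    set_inventory_for_provider("ee72c483-d9d9-4e62-8f73-e9f24668500d", inv)  # update
    set_inventory_for_provider("ee72c483-d9d9-4e62-8f73-e9f24668500d", inv)  # skipped
```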